[ 462.922229] env[63345]: DEBUG os_vif [-] Loaded VIF plugin class '<class 'vif_plug_linux_bridge.linux_bridge.LinuxBridgePlugin'>' with name 'linux_bridge' {{(pid=63345) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 462.922647] env[63345]: DEBUG os_vif [-] Loaded VIF plugin class '<class 'vif_plug_noop.noop.NoopPlugin'>' with name 'noop' {{(pid=63345) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 462.922647] env[63345]: DEBUG os_vif [-] Loaded VIF plugin class '<class 'vif_plug_ovs.ovs.OvsPlugin'>' with name 'ovs' {{(pid=63345) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 462.922988] env[63345]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 463.018334] env[63345]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=63345) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:390}}
[ 463.027666] env[63345]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.009s {{(pid=63345) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:428}}
[ 463.633943] env[63345]: INFO nova.virt.driver [None req-a7db1571-9c18-4c17-9773-345922eff3dc None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 463.709064] env[63345]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 463.709242] env[63345]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 463.709339] env[63345]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=63345) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 466.747833] env[63345]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-72e9bc94-c555-40bf-8a91-58986fd11d13 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 466.764314] env[63345]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=63345) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 466.764510] env[63345]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-1ae8adda-68f5-4730-a818-aa096e320637 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 466.796011] env[63345]: INFO oslo_vmware.api [-] Successfully established new session; session ID is 56a33.
[ 466.796193] env[63345]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.087s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 466.796669] env[63345]: INFO nova.virt.vmwareapi.driver [None req-a7db1571-9c18-4c17-9773-345922eff3dc None None] VMware vCenter version: 7.0.3
[ 466.800288] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bdc9204-0e64-4c03-9c01-935c62dcab00 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 466.817832] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1b3703c-f81c-4407-9ce4-4bcb73c8d86d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 466.823595] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e73158c4-9112-4270-a7d1-97af54dc3e2d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 466.829952] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f55abd43-d8bf-4091-8d81-99a686713b21 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 466.842526] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a865fd2b-f5fa-4174-9e96-ac2db399663c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 466.848276] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e618a33-4158-4d74-9460-68dd32885b19 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 466.878321] env[63345]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-55fbf00e-6aa0-4cae-b32b-eb60243b5630 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 466.883131] env[63345]: DEBUG nova.virt.vmwareapi.driver [None req-a7db1571-9c18-4c17-9773-345922eff3dc None None] Extension org.openstack.compute already exists. {{(pid=63345) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:227}}
[ 466.885724] env[63345]: INFO nova.compute.provider_config [None req-a7db1571-9c18-4c17-9773-345922eff3dc None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
[ 467.388872] env[63345]: DEBUG nova.context [None req-a7db1571-9c18-4c17-9773-345922eff3dc None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),4d04da4e-6f49-4eaa-bb23-839e4a3a006c(cell1) {{(pid=63345) load_cells /opt/stack/nova/nova/context.py:464}}
[ 467.390991] env[63345]: DEBUG oslo_concurrency.lockutils [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 467.391257] env[63345]: DEBUG oslo_concurrency.lockutils [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 467.391919] env[63345]: DEBUG oslo_concurrency.lockutils [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 467.392343] env[63345]: DEBUG oslo_concurrency.lockutils [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] Acquiring lock "4d04da4e-6f49-4eaa-bb23-839e4a3a006c" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 467.392525] env[63345]: DEBUG oslo_concurrency.lockutils [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] Lock "4d04da4e-6f49-4eaa-bb23-839e4a3a006c" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 467.393507] env[63345]: DEBUG oslo_concurrency.lockutils [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] Lock "4d04da4e-6f49-4eaa-bb23-839e4a3a006c" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 467.413769] env[63345]: INFO dbcounter [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] Registered counter for database nova_cell0
[ 467.422255] env[63345]: INFO dbcounter [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] Registered counter for database nova_cell1
[ 467.425379] env[63345]: DEBUG oslo_db.sqlalchemy.engines [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=63345) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 467.425748] env[63345]: DEBUG oslo_db.sqlalchemy.engines [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=63345) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 467.430531] env[63345]: ERROR nova.db.main.api [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 467.430531] env[63345]: result = function(*args, **kwargs)
[ 467.430531] env[63345]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 467.430531] env[63345]: return func(*args, **kwargs)
[ 467.430531] env[63345]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 467.430531] env[63345]: result = fn(*args, **kwargs)
[ 467.430531] env[63345]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 467.430531] env[63345]: return f(*args, **kwargs)
[ 467.430531] env[63345]: File "/opt/stack/nova/nova/objects/service.py", line 556, in _db_service_get_minimum_version
[ 467.430531] env[63345]: return db.service_get_minimum_version(context, binaries)
[ 467.430531] env[63345]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 467.430531] env[63345]: _check_db_access()
[ 467.430531] env[63345]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 467.430531] env[63345]: stacktrace = ''.join(traceback.format_stack())
[ 467.430531] env[63345]:
[ 467.431509] env[63345]: ERROR nova.db.main.api [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 467.431509] env[63345]: result = function(*args, **kwargs)
[ 467.431509] env[63345]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 467.431509] env[63345]: return func(*args, **kwargs)
[ 467.431509] env[63345]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 467.431509] env[63345]: result = fn(*args, **kwargs)
[ 467.431509] env[63345]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 467.431509] env[63345]: return f(*args, **kwargs)
[ 467.431509] env[63345]: File "/opt/stack/nova/nova/objects/service.py", line 556, in _db_service_get_minimum_version
[ 467.431509] env[63345]: return db.service_get_minimum_version(context, binaries)
[ 467.431509] env[63345]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 467.431509] env[63345]: _check_db_access()
[ 467.431509] env[63345]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 467.431509] env[63345]: stacktrace = ''.join(traceback.format_stack())
[ 467.431509] env[63345]:
[ 467.432084] env[63345]: WARNING nova.objects.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 467.432084] env[63345]: WARNING nova.objects.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] Failed to get minimum service version for cell 4d04da4e-6f49-4eaa-bb23-839e4a3a006c
[ 467.432432] env[63345]: DEBUG oslo_concurrency.lockutils [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] Acquiring lock "singleton_lock" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 467.432621] env[63345]: DEBUG oslo_concurrency.lockutils [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] Acquired lock "singleton_lock" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[
467.432833] env[63345]: DEBUG oslo_concurrency.lockutils [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] Releasing lock "singleton_lock" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 467.433161] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] Full set of CONF: {{(pid=63345) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:363}} [ 467.433303] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] ******************************************************************************** {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2806}} [ 467.433429] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] Configuration options gathered from: {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2807}} [ 467.433562] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2808}} [ 467.433749] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2809}} [ 467.433875] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] ================================================================================ {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2811}} [ 467.434097] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] allow_resize_to_same_host = True {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.434268] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] arq_binding_timeout = 300 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.434399] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] backdoor_port = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.434526] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] backdoor_socket = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.434686] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] block_device_allocate_retries = 60 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.434847] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] block_device_allocate_retries_interval = 3 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.435025] env[63345]: DEBUG 
oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] cert = self.pem {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.435197] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.435366] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] compute_monitors = [] {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.435561] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] config_dir = [] {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.435746] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] config_drive_format = iso9660 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.435884] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.436060] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] config_source = [] {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.436232] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] console_host = devstack {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.436397] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] control_exchange = nova {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.436554] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] cpu_allocation_ratio = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.436717] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] daemon = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.436886] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] debug = True {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.437057] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] default_access_ip_network_name = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.437229] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] default_availability_zone = nova {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.437384] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] default_ephemeral_format = 
None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.437540] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] default_green_pool_size = 1000 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.437810] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.437980] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] default_schedule_zone = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.438153] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] disk_allocation_ratio = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.438313] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] enable_new_services = True {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.438491] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] enabled_apis = ['osapi_compute'] {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.438674] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] enabled_ssl_apis = [] {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.438841] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] flat_injected = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.439006] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] force_config_drive = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.439170] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] force_raw_images = True {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.439337] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] graceful_shutdown_timeout = 5 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.439497] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] heal_instance_info_cache_interval = 60 {{(pid=63345) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.439711] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] host = cpu-1 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.439888] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] initial_cpu_allocation_ratio = 4.0 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.440061] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] initial_disk_allocation_ratio = 1.0 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.440222] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] initial_ram_allocation_ratio = 1.0 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.440433] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.440642] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] instance_build_timeout = 0 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.440760] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] instance_delete_interval = 300 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.440930] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] instance_format = [instance: %(uuid)s] {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.441110] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] instance_name_template = instance-%08x {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.441277] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] instance_usage_audit = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.441449] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] instance_usage_audit_period = month {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.441614] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.441779] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] instances_path = /opt/stack/data/nova/instances {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.441943] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] internal_service_availability_zone = internal {{(pid=63345) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.442114] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] key = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.442278] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] live_migration_retry_count = 30 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.442444] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] log_color = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.442609] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] log_config_append = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.442774] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.442932] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] log_dir = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.443099] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] log_file = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.443229] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] log_options = True {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.443389] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] log_rotate_interval = 1 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.443558] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] log_rotate_interval_type = days {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.443724] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] log_rotation_type = none {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.443854] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.443982] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.444161] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] 
logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.444328] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.444458] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.444620] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] long_rpc_timeout = 1800 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.444779] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] max_concurrent_builds = 10 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.444937] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] max_concurrent_live_migrations = 1 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.445107] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] max_concurrent_snapshots = 5 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.445266] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] max_local_block_devices = 3 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.445422] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] max_logfile_count = 30 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.445609] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] max_logfile_size_mb = 200 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.445773] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] maximum_instance_delete_attempts = 5 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.445942] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] metadata_listen = 0.0.0.0 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.446124] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] metadata_listen_port = 8775 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.446293] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] metadata_workers = 2 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.446455] env[63345]: DEBUG oslo_service.service 
[None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] migrate_max_retries = -1 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.446619] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] mkisofs_cmd = genisoimage {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.446823] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] my_block_storage_ip = 10.180.1.21 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.446954] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] my_ip = 10.180.1.21 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.447169] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] my_shared_fs_storage_ip = 10.180.1.21 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.447335] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] network_allocate_retries = 0 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.447519] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.447717] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] osapi_compute_listen = 0.0.0.0 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.447890] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] osapi_compute_listen_port = 8774 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.448070] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] osapi_compute_unique_server_name_scope = {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.448244] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] osapi_compute_workers = 2 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.448408] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] password_length = 12 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.448580] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] periodic_enable = True {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.448754] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] periodic_fuzzy_delay = 60 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.448924] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] pointer_model = usbtablet 
{{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.449103] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] preallocate_images = none {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.449266] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] publish_errors = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.449397] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] pybasedir = /opt/stack/nova {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.449555] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] ram_allocation_ratio = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.449717] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] rate_limit_burst = 0 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.449882] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] rate_limit_except_level = CRITICAL {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.450051] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] rate_limit_interval = 0 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.450215] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] reboot_timeout = 0 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.450373] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] reclaim_instance_interval = 0 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.450527] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] record = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.450697] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] reimage_timeout_per_gb = 60 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.450856] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] report_interval = 120 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.451017] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] rescue_timeout = 0 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.451180] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] reserved_host_cpus = 0 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.451339] env[63345]: DEBUG oslo_service.service [None 
req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] reserved_host_disk_mb = 0 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.451494] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] reserved_host_memory_mb = 512 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.451654] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] reserved_huge_pages = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.451814] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] resize_confirm_window = 0 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.451971] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] resize_fs_using_block_device = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.452140] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] resume_guests_state_on_host_boot = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.452309] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.452472] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] rpc_response_timeout = 60 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.452632] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] run_external_periodic_tasks = True {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.452799] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] running_deleted_instance_action = reap {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.452956] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] running_deleted_instance_poll_interval = 1800 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.453126] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] running_deleted_instance_timeout = 0 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.453284] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] scheduler_instance_sync_interval = 120 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.453448] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] service_down_time = 720 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.453614] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] 
servicegroup_driver = db {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.453771] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] shell_completion = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.453927] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] shelved_offload_time = 0 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.454095] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] shelved_poll_interval = 3600 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.454261] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] shutdown_timeout = 0 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.454420] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] source_is_ipv6 = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.454577] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] ssl_only = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.454814] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.454979] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] sync_power_state_interval = 600 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.455151] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] sync_power_state_pool_size = 1000 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.455316] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] syslog_log_facility = LOG_USER {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.455487] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] tempdir = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.455659] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] timeout_nbd = 10 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.455828] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] transport_url = **** {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.455988] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] update_resources_interval = 0 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.456158] env[63345]: DEBUG 
oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] use_cow_images = True {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.456315] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] use_eventlog = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.456473] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] use_journal = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.456627] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] use_json = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.456784] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] use_rootwrap_daemon = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.456938] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] use_stderr = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.457103] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] use_syslog = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.457260] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] vcpu_pin_set = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.457426] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] vif_plugging_is_fatal = True {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.457626] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] vif_plugging_timeout = 300 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.457803] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] virt_mkfs = [] {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.457967] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] volume_usage_poll_interval = 0 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.458141] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] watch_log_file = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.458308] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] web = /usr/share/spice-html5 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.458488] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_concurrency.disable_process_locking = False {{(pid=63345) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.458816] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.459009] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.459185] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.459357] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_messaging_metrics.metrics_process_name = {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.459526] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.459691] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.459872] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] api.auth_strategy = keystone {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.460048] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] api.compute_link_prefix = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.460228] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.460402] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] api.dhcp_domain = novalocal {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.460573] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] api.enable_instance_password = True {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.460740] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] api.glance_link_prefix = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.460906] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.461088] env[63345]: 
DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] api.instance_list_cells_batch_strategy = distributed {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.461253] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] api.instance_list_per_project_cells = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.461425] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] api.list_records_by_skipping_down_cells = True {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.461588] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] api.local_metadata_per_cell = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.461756] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] api.max_limit = 1000 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.461923] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] api.metadata_cache_expiration = 15 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.462107] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] api.neutron_default_tenant_id = default {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.462282] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] api.response_validation = warn {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.462447] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] api.use_neutron_default_nets = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.462616] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.462786] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] api.vendordata_dynamic_failure_fatal = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.462953] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.463140] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] api.vendordata_dynamic_ssl_certfile = {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.463312] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] api.vendordata_dynamic_targets = [] {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 
467.463473] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] api.vendordata_jsonfile_path = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.463653] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] api.vendordata_providers = ['StaticJSON'] {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.463844] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] cache.backend = dogpile.cache.memcached {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.464016] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] cache.backend_argument = **** {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.464193] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] cache.config_prefix = cache.oslo {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.464364] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] cache.dead_timeout = 60.0 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.464529] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] cache.debug_cache_backend = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.464689] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] cache.enable_retry_client = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.464848] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] cache.enable_socket_keepalive = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.465026] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] cache.enabled = True {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.465191] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] cache.enforce_fips_mode = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.465355] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] cache.expiration_time = 600 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.465518] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] cache.hashclient_retry_attempts = 2 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.465682] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] cache.hashclient_retry_delay = 1.0 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.465846] env[63345]: DEBUG oslo_service.service [None 
req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] cache.memcache_dead_retry = 300 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.466008] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] cache.memcache_password = **** {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.466178] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.466337] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.466498] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] cache.memcache_pool_maxsize = 10 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.466662] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] cache.memcache_pool_unused_timeout = 60 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.466822] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] cache.memcache_sasl_enabled = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.466997] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] cache.memcache_servers = ['localhost:11211'] {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.467174] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] cache.memcache_socket_timeout = 1.0 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.467334] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] cache.memcache_username = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.467498] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] cache.proxies = [] {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.467686] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] cache.redis_db = 0 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.467853] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] cache.redis_password = **** {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.468033] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] cache.redis_sentinel_service_name = mymaster {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.468214] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf 
None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.468383] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] cache.redis_server = localhost:6379 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.468554] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] cache.redis_socket_timeout = 1.0 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.468735] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] cache.redis_username = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.468900] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] cache.retry_attempts = 2 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.469076] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] cache.retry_delay = 0.0 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.469242] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] cache.socket_keepalive_count = 1 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.469402] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] cache.socket_keepalive_idle = 1 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.469561] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] cache.socket_keepalive_interval = 1 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.469721] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] cache.tls_allowed_ciphers = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.469877] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] cache.tls_cafile = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.470042] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] cache.tls_certfile = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.470210] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] cache.tls_enabled = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.470366] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] cache.tls_keyfile = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.470533] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] cinder.auth_section = None {{(pid=63345) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.470704] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] cinder.auth_type = password {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.470866] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] cinder.cafile = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.471048] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] cinder.catalog_info = volumev3::publicURL {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.471210] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] cinder.certfile = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.471373] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] cinder.collect_timing = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.471531] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] cinder.cross_az_attach = True {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.471691] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] cinder.debug = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.471849] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] cinder.endpoint_template = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.472015] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] cinder.http_retries = 3 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.472182] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] cinder.insecure = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.472339] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] cinder.keyfile = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.472506] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] cinder.os_region_name = RegionOne {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.472666] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] cinder.split_loggers = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.472823] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] cinder.timeout = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.472994] env[63345]: DEBUG oslo_service.service [None 
req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.473167] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] compute.cpu_dedicated_set = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.473325] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] compute.cpu_shared_set = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.473488] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] compute.image_type_exclude_list = [] {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.473651] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] compute.live_migration_wait_for_vif_plug = True {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.473815] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] compute.max_concurrent_disk_ops = 0 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.473977] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] compute.max_disk_devices_to_attach = -1 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.474153] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.474322] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.474486] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] compute.resource_provider_association_refresh = 300 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.474646] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] compute.sharing_providers_max_uuids_per_request = 200 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.474808] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] compute.shutdown_retry_interval = 10 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.474986] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.475175] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] conductor.workers = 2 {{(pid=63345) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.475354] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] console.allowed_origins = [] {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.475514] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] console.ssl_ciphers = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.475683] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] console.ssl_minimum_version = default {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.475851] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] consoleauth.enforce_session_timeout = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.476026] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] consoleauth.token_ttl = 600 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.476197] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] cyborg.cafile = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.476353] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] cyborg.certfile = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.476514] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] cyborg.collect_timing = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.476671] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] cyborg.connect_retries = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.476831] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] cyborg.connect_retry_delay = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.476988] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] cyborg.endpoint_override = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.477157] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] cyborg.insecure = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.477312] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] cyborg.keyfile = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.477466] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] cyborg.max_version = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.477643] env[63345]: DEBUG 
oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] cyborg.min_version = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.477808] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] cyborg.region_name = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.477965] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] cyborg.retriable_status_codes = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.478135] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] cyborg.service_name = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.478302] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] cyborg.service_type = accelerator {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.478460] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] cyborg.split_loggers = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.478633] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] cyborg.status_code_retries = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.478817] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] cyborg.status_code_retry_delay = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.478951] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] cyborg.timeout = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.479143] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.479302] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] cyborg.version = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.479477] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] database.backend = sqlalchemy {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.479645] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] database.connection = **** {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.479813] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] database.connection_debug = 0 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.479979] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] 
database.connection_parameters = {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.480154] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] database.connection_recycle_time = 3600 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.480314] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] database.connection_trace = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.480471] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] database.db_inc_retry_interval = True {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.480630] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] database.db_max_retries = 20 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.480791] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] database.db_max_retry_interval = 10 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.480949] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] database.db_retry_interval = 1 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.481131] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] database.max_overflow = 50 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.481278] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] database.max_pool_size = 5 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.481434] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] database.max_retries = 10 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.481598] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] database.mysql_sql_mode = TRADITIONAL {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.481756] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] database.mysql_wsrep_sync_wait = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.481908] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] database.pool_timeout = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.482075] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] database.retry_interval = 10 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.482232] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] database.slave_connection = **** {{(pid=63345) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.482390] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] database.sqlite_synchronous = True {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.482548] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] database.use_db_reconnect = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.482721] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] api_database.backend = sqlalchemy {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.482889] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] api_database.connection = **** {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.483061] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] api_database.connection_debug = 0 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.483233] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] api_database.connection_parameters = {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.483431] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] api_database.connection_recycle_time = 3600 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.483598] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] api_database.connection_trace = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.483762] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] api_database.db_inc_retry_interval = True {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.483924] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] api_database.db_max_retries = 20 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.484098] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] api_database.db_max_retry_interval = 10 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.484261] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] api_database.db_retry_interval = 1 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.484423] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] api_database.max_overflow = 50 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.484586] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] api_database.max_pool_size = 5 {{(pid=63345) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.484747] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] api_database.max_retries = 10 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.484914] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.485081] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] api_database.mysql_wsrep_sync_wait = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.485242] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] api_database.pool_timeout = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.485402] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] api_database.retry_interval = 10 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.485558] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] api_database.slave_connection = **** {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.485719] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] api_database.sqlite_synchronous = True {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.485893] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] devices.enabled_mdev_types = [] {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.486078] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.486254] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] ephemeral_storage_encryption.default_format = luks {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.486414] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] ephemeral_storage_encryption.enabled = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.486577] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] ephemeral_storage_encryption.key_size = 512 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.486746] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] glance.api_servers = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.486907] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] glance.cafile = None {{(pid=63345) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.487076] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] glance.certfile = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.487241] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] glance.collect_timing = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.487400] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] glance.connect_retries = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.487572] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] glance.connect_retry_delay = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.487740] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] glance.debug = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.487910] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] glance.default_trusted_certificate_ids = [] {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.488084] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] glance.enable_certificate_validation = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.488247] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] glance.enable_rbd_download = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.488404] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] glance.endpoint_override = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.488587] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] glance.insecure = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.488764] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] glance.keyfile = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.488925] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] glance.max_version = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.489097] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] glance.min_version = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.489264] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] glance.num_retries = 3 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.489433] env[63345]: DEBUG 
oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] glance.rbd_ceph_conf = {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.489595] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] glance.rbd_connect_timeout = 5 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.489763] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] glance.rbd_pool = {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.489927] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] glance.rbd_user = {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.490096] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] glance.region_name = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.490257] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] glance.retriable_status_codes = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.490414] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] glance.service_name = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.490580] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] glance.service_type = image {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.490740] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] glance.split_loggers = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.490896] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] glance.status_code_retries = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.491062] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] glance.status_code_retry_delay = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.491241] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] glance.timeout = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.491403] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.491567] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] glance.verify_glance_signatures = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.491725] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] glance.version = None 
{{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.491887] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] guestfs.debug = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.492062] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] manila.auth_section = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.492228] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] manila.auth_type = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.492383] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] manila.cafile = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.492540] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] manila.certfile = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.492704] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] manila.collect_timing = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.492864] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] manila.connect_retries = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.493029] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] manila.connect_retry_delay = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.493191] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] manila.endpoint_override = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.493350] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] manila.insecure = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.493503] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] manila.keyfile = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.493660] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] manila.max_version = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.493815] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] manila.min_version = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.493969] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] manila.region_name = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.494164] env[63345]: DEBUG oslo_service.service [None 
req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] manila.retriable_status_codes = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.494290] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] manila.service_name = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.494457] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] manila.service_type = shared-file-system {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.494619] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] manila.share_apply_policy_timeout = 10 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.494778] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] manila.split_loggers = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.494933] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] manila.status_code_retries = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.495097] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] manila.status_code_retry_delay = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.495256] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] manila.timeout = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.495434] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] manila.valid_interfaces = ['internal', 'public'] {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.495591] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] manila.version = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.495756] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] mks.enabled = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.496111] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.496302] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] image_cache.manager_interval = 2400 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.496470] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] image_cache.precache_concurrency = 1 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.496636] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] 
image_cache.remove_unused_base_images = True {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.496804] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.496970] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.497158] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] image_cache.subdirectory_name = _base {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.497333] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] ironic.api_max_retries = 60 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.497498] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] ironic.api_retry_interval = 2 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.497681] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] ironic.auth_section = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.497848] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] ironic.auth_type = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.498015] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] ironic.cafile = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.498178] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] ironic.certfile = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.498341] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] ironic.collect_timing = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.498504] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] ironic.conductor_group = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.498684] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] ironic.connect_retries = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.498850] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] ironic.connect_retry_delay = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.499013] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] ironic.endpoint_override = None {{(pid=63345) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.499180] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] ironic.insecure = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.499335] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] ironic.keyfile = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.499490] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] ironic.max_version = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.499646] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] ironic.min_version = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.499810] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] ironic.peer_list = [] {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.499967] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] ironic.region_name = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.500135] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] ironic.retriable_status_codes = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.500298] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] ironic.serial_console_state_timeout = 10 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.500454] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] ironic.service_name = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.500623] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] ironic.service_type = baremetal {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.500783] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] ironic.shard = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.500944] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] ironic.split_loggers = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.501113] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] ironic.status_code_retries = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.501273] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] ironic.status_code_retry_delay = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.501432] env[63345]: DEBUG 
oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] ironic.timeout = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.501612] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.501773] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] ironic.version = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.501952] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.502135] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] key_manager.fixed_key = **** {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.502315] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.502474] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] barbican.barbican_api_version = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.502632] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] barbican.barbican_endpoint = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.502800] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] barbican.barbican_endpoint_type = public {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.502957] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] barbican.barbican_region_name = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.503124] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] barbican.cafile = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.503282] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] barbican.certfile = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.503443] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] barbican.collect_timing = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.503599] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] barbican.insecure = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.503757] env[63345]: DEBUG oslo_service.service [None 
req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] barbican.keyfile = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.503917] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] barbican.number_of_retries = 60 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.504083] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] barbican.retry_delay = 1 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.504274] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] barbican.send_service_user_token = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.504400] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] barbican.split_loggers = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.504554] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] barbican.timeout = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.504713] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] barbican.verify_ssl = True {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.504868] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] barbican.verify_ssl_path = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.505040] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] barbican_service_user.auth_section = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.505204] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] barbican_service_user.auth_type = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.505361] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] barbican_service_user.cafile = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.505514] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] barbican_service_user.certfile = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.505682] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] barbican_service_user.collect_timing = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.505867] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] barbican_service_user.insecure = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.506037] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] 
barbican_service_user.keyfile = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.506203] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] barbican_service_user.split_loggers = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.506357] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] barbican_service_user.timeout = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.506520] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] vault.approle_role_id = **** {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.506676] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] vault.approle_secret_id = **** {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.506847] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] vault.kv_mountpoint = secret {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.507010] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] vault.kv_path = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.507179] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] vault.kv_version = 2 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.507335] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] vault.namespace = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.507492] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] vault.root_token_id = **** {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.507664] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] vault.ssl_ca_crt_file = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.507835] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] vault.timeout = 60.0 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.507996] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] vault.use_ssl = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.508177] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.508342] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] keystone.auth_section = None {{(pid=63345) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.508503] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] keystone.auth_type = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.508684] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] keystone.cafile = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.508848] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] keystone.certfile = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.509029] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] keystone.collect_timing = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.509181] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] keystone.connect_retries = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.509335] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] keystone.connect_retry_delay = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.509490] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] keystone.endpoint_override = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.509648] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] keystone.insecure = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.509802] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] keystone.keyfile = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.509953] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] keystone.max_version = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.510117] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] keystone.min_version = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.510272] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] keystone.region_name = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.510425] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] keystone.retriable_status_codes = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.510580] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] keystone.service_name = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.510748] env[63345]: DEBUG 
oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] keystone.service_type = identity {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.510908] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] keystone.split_loggers = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.511073] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] keystone.status_code_retries = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.511233] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] keystone.status_code_retry_delay = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.511392] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] keystone.timeout = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.511573] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.511730] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] keystone.version = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.511929] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.connection_uri = {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.512101] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.cpu_mode = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.512269] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.cpu_model_extra_flags = [] {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.512436] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.cpu_models = [] {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.512605] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.cpu_power_governor_high = performance {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.512772] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.cpu_power_governor_low = powersave {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.512931] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.cpu_power_management = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.513110] env[63345]: DEBUG oslo_service.service [None 
req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.513277] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.device_detach_attempts = 8 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.513437] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.device_detach_timeout = 20 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.513599] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.disk_cachemodes = [] {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.513773] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.disk_prefix = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.513944] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.enabled_perf_events = [] {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.514198] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.file_backed_memory = 0 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.514243] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.gid_maps = [] {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.514396] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.hw_disk_discard = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.514555] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.hw_machine_type = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.514727] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.images_rbd_ceph_conf = {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.514891] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.515062] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.515232] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.images_rbd_glance_store_name = {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.515399] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf 
None None] libvirt.images_rbd_pool = rbd {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.515564] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.images_type = default {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.515720] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.images_volume_group = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.515877] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.inject_key = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.516045] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.inject_partition = -2 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.516205] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.inject_password = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.516365] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.iscsi_iface = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.516522] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.iser_use_multipath = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.516683] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.live_migration_bandwidth = 0 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.516844] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.live_migration_completion_timeout = 800 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.517013] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.live_migration_downtime = 500 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.517179] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.live_migration_downtime_delay = 75 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.517336] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.live_migration_downtime_steps = 10 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.517491] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.live_migration_inbound_addr = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.517673] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] 
libvirt.live_migration_permit_auto_converge = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.517841] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.live_migration_permit_post_copy = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.517999] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.live_migration_scheme = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.518184] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.live_migration_timeout_action = abort {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.518345] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.live_migration_tunnelled = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.518502] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.live_migration_uri = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.518689] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.live_migration_with_native_tls = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.518857] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.max_queues = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.519032] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.mem_stats_period_seconds = 10 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.519267] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.519432] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.nfs_mount_options = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.519713] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.519900] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.num_aoe_discover_tries = 3 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.520093] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.num_iser_scan_tries = 5 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.520260] env[63345]: DEBUG oslo_service.service [None 
req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.num_memory_encrypted_guests = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.520424] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.num_nvme_discover_tries = 5 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.520589] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.num_pcie_ports = 0 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.520760] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.num_volume_scan_tries = 5 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.520927] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.pmem_namespaces = [] {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.521099] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.quobyte_client_cfg = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.521377] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.521554] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.rbd_connect_timeout = 5 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.521711] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.521871] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.522038] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.rbd_secret_uuid = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.522196] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.rbd_user = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.522354] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.realtime_scheduler_priority = 1 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.522521] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.remote_filesystem_transport = ssh {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.522677] env[63345]: DEBUG oslo_service.service [None 
req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.rescue_image_id = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.522834] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.rescue_kernel_id = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.522991] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.rescue_ramdisk_id = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.523172] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.rng_dev_path = /dev/urandom {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.523329] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.rx_queue_size = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.523491] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.smbfs_mount_options = {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.523757] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.523926] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.snapshot_compression = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.524094] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.snapshot_image_format = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.524324] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.524492] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.sparse_logical_volumes = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.524653] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.swtpm_enabled = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.524821] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.swtpm_group = tss {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.524985] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.swtpm_user = tss {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.525169] env[63345]: DEBUG oslo_service.service [None 
req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.sysinfo_serial = unique {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.525327] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.tb_cache_size = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.525483] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.tx_queue_size = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.525677] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.uid_maps = [] {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.525867] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.use_virtio_for_bridges = True {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.526051] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.virt_type = kvm {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.526226] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.volume_clear = zero {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.526390] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.volume_clear_size = 0 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.526554] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.volume_use_multipath = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.526712] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.vzstorage_cache_path = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.526880] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.527053] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.vzstorage_mount_group = qemu {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.527221] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.vzstorage_mount_opts = [] {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.527386] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.527681] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf 
None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.527870] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.vzstorage_mount_user = stack {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.528048] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.528225] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] neutron.auth_section = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.528394] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] neutron.auth_type = password {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.528562] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] neutron.cafile = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.528738] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] neutron.certfile = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.528902] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] neutron.collect_timing = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.529071] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] neutron.connect_retries = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.529229] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] neutron.connect_retry_delay = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.529396] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] neutron.default_floating_pool = public {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.529554] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] neutron.endpoint_override = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.529715] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] neutron.extension_sync_interval = 600 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.529878] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] neutron.http_retries = 3 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.530069] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] neutron.insecure = False {{(pid=63345) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.530233] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] neutron.keyfile = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.530388] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] neutron.max_version = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.530556] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] neutron.metadata_proxy_shared_secret = **** {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.530722] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] neutron.min_version = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.530980] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] neutron.ovs_bridge = br-int {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.531261] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] neutron.physnets = [] {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.531548] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] neutron.region_name = RegionOne {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.531817] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] neutron.retriable_status_codes = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.532070] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] neutron.service_metadata_proxy = True {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.532253] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] neutron.service_name = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.532428] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] neutron.service_type = network {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.532592] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] neutron.split_loggers = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.532752] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] neutron.status_code_retries = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.532910] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] neutron.status_code_retry_delay = None {{(pid=63345) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.533080] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] neutron.timeout = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.533263] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.533423] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] neutron.version = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.533593] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] notifications.bdms_in_notifications = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.533769] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] notifications.default_level = INFO {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.533940] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] notifications.notification_format = unversioned {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.534114] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] notifications.notify_on_state_change = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.534289] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.534458] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] pci.alias = [] {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.534625] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] pci.device_spec = [] {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.534795] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] pci.report_in_placement = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.534965] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] placement.auth_section = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.535148] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] placement.auth_type = password {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.535316] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] placement.auth_url = http://10.180.1.21/identity {{(pid=63345) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.535475] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] placement.cafile = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.535629] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] placement.certfile = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.535792] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] placement.collect_timing = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.535947] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] placement.connect_retries = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.536112] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] placement.connect_retry_delay = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.536268] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] placement.default_domain_id = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.536423] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] placement.default_domain_name = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.536579] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] placement.domain_id = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.536737] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] placement.domain_name = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.536893] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] placement.endpoint_override = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.537061] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] placement.insecure = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.537221] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] placement.keyfile = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.537375] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] placement.max_version = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.537531] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] placement.min_version = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.537750] env[63345]: 
DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] placement.password = **** {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.537929] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] placement.project_domain_id = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.538111] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] placement.project_domain_name = Default {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.538281] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] placement.project_id = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.538453] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] placement.project_name = service {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.538635] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] placement.region_name = RegionOne {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.538802] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] placement.retriable_status_codes = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.538961] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] placement.service_name = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.539141] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] placement.service_type = placement {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.539305] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] placement.split_loggers = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.539464] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] placement.status_code_retries = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.539622] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] placement.status_code_retry_delay = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.539780] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] placement.system_scope = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.539936] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] placement.timeout = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.540102] env[63345]: DEBUG oslo_service.service [None 
req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] placement.trust_id = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.540262] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] placement.user_domain_id = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.540427] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] placement.user_domain_name = Default {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.540586] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] placement.user_id = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.540755] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] placement.username = nova {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.540933] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.541103] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] placement.version = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.541283] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] quota.cores = 20 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.541448] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] quota.count_usage_from_placement = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.541619] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.541797] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] quota.injected_file_content_bytes = 10240 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.541965] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] quota.injected_file_path_length = 255 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.542139] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] quota.injected_files = 5 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.542305] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] quota.instances = 10 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.542466] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] quota.key_pairs = 100 
{{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.542631] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] quota.metadata_items = 128 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.542796] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] quota.ram = 51200 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.542956] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] quota.recheck_quota = True {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.543133] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] quota.server_group_members = 10 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.543299] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] quota.server_groups = 10 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.543466] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.543629] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.543788] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] scheduler.image_metadata_prefilter = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.543947] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.544118] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] scheduler.max_attempts = 3 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.544283] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] scheduler.max_placement_results = 1000 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.544442] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.544602] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] scheduler.query_placement_for_image_type_support = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.544766] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] 
scheduler.query_placement_for_routed_network_aggregates = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.544934] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] scheduler.workers = 2 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.545115] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.545286] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] filter_scheduler.aggregate_image_properties_isolation_separator = . {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.545460] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.545624] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.545787] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.545948] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.546118] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.546305] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.546469] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] filter_scheduler.host_subset_size = 1 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.546630] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.546791] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] filter_scheduler.image_properties_default_architecture = None {{(pid=63345) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.546952] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.547127] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] filter_scheduler.isolated_hosts = [] {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.547291] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] filter_scheduler.isolated_images = [] {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.547452] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] filter_scheduler.max_instances_per_host = 50 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.547615] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.547776] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.547935] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] filter_scheduler.pci_in_placement = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.548102] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.548263] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.548421] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.548579] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.548736] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.548891] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.549072] env[63345]: DEBUG 
oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] filter_scheduler.track_instance_changes = True {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.549249] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.549415] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] metrics.required = True {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.549575] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] metrics.weight_multiplier = 1.0 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.549734] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] metrics.weight_of_unavailable = -10000.0 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.549892] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] metrics.weight_setting = [] {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.550212] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.550385] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] serial_console.enabled = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.550557] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] serial_console.port_range = 10000:20000 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.550725] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.550889] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.551061] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] serial_console.serialproxy_port = 6083 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.551228] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] service_user.auth_section = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.551395] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] service_user.auth_type = password {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 
467.551551] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] service_user.cafile = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.551709] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] service_user.certfile = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.551866] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] service_user.collect_timing = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.552030] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] service_user.insecure = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.552189] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] service_user.keyfile = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.552355] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] service_user.send_service_user_token = True {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.552513] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] service_user.split_loggers = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.552669] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] service_user.timeout = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.552836] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] spice.agent_enabled = True {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.552994] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] spice.enabled = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.553308] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.553495] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] spice.html5proxy_host = 0.0.0.0 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.553663] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] spice.html5proxy_port = 6082 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.553825] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] spice.image_compression = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.553982] env[63345]: DEBUG oslo_service.service [None 
req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] spice.jpeg_compression = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.554160] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] spice.playback_compression = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.554309] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] spice.require_secure = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.554473] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] spice.server_listen = 127.0.0.1 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.554637] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.554815] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] spice.streaming_mode = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.554945] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] spice.zlib_compression = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.555121] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] upgrade_levels.baseapi = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.555291] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] upgrade_levels.compute = auto {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.555449] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] upgrade_levels.conductor = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.555604] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] upgrade_levels.scheduler = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.555767] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] vendordata_dynamic_auth.auth_section = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.555925] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] vendordata_dynamic_auth.auth_type = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.556091] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] vendordata_dynamic_auth.cafile = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.556250] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] 
vendordata_dynamic_auth.certfile = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.556411] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] vendordata_dynamic_auth.collect_timing = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.556568] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] vendordata_dynamic_auth.insecure = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.556726] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] vendordata_dynamic_auth.keyfile = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.556885] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] vendordata_dynamic_auth.split_loggers = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.557050] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] vendordata_dynamic_auth.timeout = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.557226] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] vmware.api_retry_count = 10 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.557384] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] vmware.ca_file = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.557567] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] vmware.cache_prefix = devstack-image-cache {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.557723] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] vmware.cluster_name = testcl1 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.557887] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] vmware.connection_pool_size = 10 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.558056] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] vmware.console_delay_seconds = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.558225] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] vmware.datastore_regex = ^datastore.* {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.558424] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.558594] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] 
vmware.host_password = **** {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.558760] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] vmware.host_port = 443 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.558926] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] vmware.host_username = administrator@vsphere.local {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.559103] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] vmware.insecure = True {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.559269] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] vmware.integration_bridge = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.559430] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] vmware.maximum_objects = 100 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.559588] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] vmware.pbm_default_policy = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.559750] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] vmware.pbm_enabled = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.559907] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] vmware.pbm_wsdl_location = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.560085] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.560246] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] vmware.serial_port_proxy_uri = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.560404] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] vmware.serial_port_service_uri = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.560566] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] vmware.task_poll_interval = 0.5 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.560737] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] vmware.use_linked_clone = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.560903] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] vmware.vnc_keymap = en-us {{(pid=63345) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.561076] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] vmware.vnc_port = 5900 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.561241] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] vmware.vnc_port_total = 10000 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.561424] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] vnc.auth_schemes = ['none'] {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.561596] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] vnc.enabled = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.561885] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.562081] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.562256] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] vnc.novncproxy_port = 6080 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.562432] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] vnc.server_listen = 127.0.0.1 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.562601] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.562761] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] vnc.vencrypt_ca_certs = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.562917] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] vnc.vencrypt_client_cert = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.563084] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] vnc.vencrypt_client_key = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.563676] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.563676] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] workarounds.disable_deep_image_inspection = False {{(pid=63345) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.563676] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] workarounds.disable_fallback_pcpu_query = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.563796] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] workarounds.disable_group_policy_check_upcall = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.563882] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.564029] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] workarounds.disable_rootwrap = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.564195] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] workarounds.enable_numa_live_migration = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.564355] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.564511] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.564668] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] workarounds.handle_virt_lifecycle_events = True {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.564827] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] workarounds.libvirt_disable_apic = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.564994] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] workarounds.never_download_image_if_on_rbd = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.565173] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.565333] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.565491] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.565647] 
env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.565803] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.565959] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.566127] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.566284] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.566444] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.566624] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.566793] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] wsgi.client_socket_timeout = 900 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.566956] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] wsgi.default_pool_size = 1000 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.567133] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] wsgi.keep_alive = True {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.567300] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] wsgi.max_header_line = 16384 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.567460] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] wsgi.secure_proxy_ssl_header = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.567624] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] wsgi.ssl_ca_file = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.567784] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] wsgi.ssl_cert_file = None {{(pid=63345) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.567942] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] wsgi.ssl_key_file = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.568116] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] wsgi.tcp_keepidle = 600 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.568295] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.568458] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] zvm.ca_file = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.568619] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] zvm.cloud_connector_url = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.569280] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.569465] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] zvm.reachable_timeout = 300 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.569650] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_policy.enforce_new_defaults = True {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.570018] env[63345]: WARNING oslo_config.cfg [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] Deprecated: Option "enforce_scope" from group "oslo_policy" is deprecated for removal (This configuration was added temporarily to facilitate a smooth transition to the new RBAC. OpenStack will always enforce scope checks. This configuration option is deprecated and will be removed in the 2025.2 cycle.). Its value may be silently ignored in the future. 
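The long run of DEBUG lines in this section is produced by oslo.config's option dump at service startup (the `log_opt_values` frames in each entry), and the WARNING above is the kind of message oslo.config emits for an option flagged as deprecated for removal. The following is a minimal, illustrative sketch of that mechanism, not Nova's actual option definitions: the group and option names mirror the log above, but the registrations here are assumptions made only for the example.

```python
# Minimal sketch of how oslo.config produces a "group.option = value" dump
# like the one in this log. Assumes oslo.config is installed; the option
# definitions below are illustrative, not copied from Nova.
import logging

from oslo_config import cfg

logging.basicConfig(level=logging.DEBUG)
LOG = logging.getLogger(__name__)

CONF = cfg.CONF

# An option registered with deprecated_for_removal=True causes oslo.config
# to log a "Deprecated: Option ... is deprecated for removal" warning
# (similar to the oslo_policy.enforce_scope warning above) when a value
# for it is supplied in a configuration file.
opts = [
    cfg.BoolOpt('enforce_scope',
                default=True,
                deprecated_for_removal=True,
                deprecated_reason='Scope checks are always enforced.'),
    cfg.StrOpt('policy_file', default='policy.yaml'),
]
CONF.register_opts(opts, group='oslo_policy')

# Parse (empty) command line / config files for this example project.
CONF([], project='example')

# log_opt_values() walks every registered group and option and logs one
# "group.option = value" line per option at the given level -- the pattern
# that fills this section of the log.
CONF.log_opt_values(LOG, logging.DEBUG)
```

Running the sketch prints one DEBUG line per registered option in `group.option = value` form, which is why the startup log contains one entry for every configuration option Nova knows about, including options left at their defaults.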
[ 467.570210] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_policy.enforce_scope = True {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.570388] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_policy.policy_default_rule = default {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.570569] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_policy.policy_dirs = ['policy.d'] {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.570755] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_policy.policy_file = policy.yaml {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.570919] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_policy.remote_content_type = application/x-www-form-urlencoded {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.571091] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_policy.remote_ssl_ca_crt_file = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.571254] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_policy.remote_ssl_client_crt_file = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.571411] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_policy.remote_ssl_client_key_file = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.571569] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_policy.remote_ssl_verify_server_crt = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.571737] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.571909] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.572093] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] profiler.connection_string = messaging:// {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.572264] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] profiler.enabled = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.572431] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] profiler.es_doc_type = notification 
{{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.572593] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] profiler.es_scroll_size = 10000 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.572761] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] profiler.es_scroll_time = 2m {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.572920] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] profiler.filter_error_trace = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.573097] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] profiler.hmac_keys = **** {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.573265] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] profiler.sentinel_service_name = mymaster {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.573426] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] profiler.socket_timeout = 0.1 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.573587] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] profiler.trace_requests = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.573747] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] profiler.trace_sqlalchemy = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.573919] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] profiler_jaeger.process_tags = {} {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.574095] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] profiler_jaeger.service_name_prefix = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.574260] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] profiler_otlp.service_name_prefix = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.574425] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] remote_debug.host = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.574584] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] remote_debug.port = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.574762] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=63345) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.574922] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.575118] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.575253] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.575410] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.575567] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.575724] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.575881] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.576047] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.576217] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_messaging_rabbit.hostname = devstack {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.576374] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.576541] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.576706] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.576873] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.577047] 
env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.577215] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.577375] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.577548] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.577710] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.577872] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.578047] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.578213] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.578374] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.578538] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.578695] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.578854] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.579016] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.579178] env[63345]: DEBUG oslo_service.service [None 
req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.579340] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.579502] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_messaging_rabbit.ssl = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.579673] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.579841] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.580006] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.580180] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.580348] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_messaging_rabbit.ssl_version = {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.580506] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.580690] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.580857] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_messaging_notifications.retry = -1 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.581046] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.581223] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_messaging_notifications.transport_url = **** {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.581390] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_limit.auth_section = None {{(pid=63345) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.581549] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_limit.auth_type = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.581707] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_limit.cafile = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.581859] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_limit.certfile = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.582073] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_limit.collect_timing = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.582187] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_limit.connect_retries = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.582342] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_limit.connect_retry_delay = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.582496] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_limit.endpoint_id = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.582650] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_limit.endpoint_override = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.582808] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_limit.insecure = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.582959] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_limit.keyfile = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.583123] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_limit.max_version = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.583278] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_limit.min_version = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.583431] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_limit.region_name = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.583586] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_limit.retriable_status_codes = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.583741] 
env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_limit.service_name = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.583895] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_limit.service_type = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.584061] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_limit.split_loggers = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.584219] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_limit.status_code_retries = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.584373] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_limit.status_code_retry_delay = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.584526] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_limit.timeout = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.584680] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_limit.valid_interfaces = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.584836] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_limit.version = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.584994] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_reports.file_event_handler = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.585222] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_reports.file_event_handler_interval = 1 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.585323] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] oslo_reports.log_dir = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.585488] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.585644] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] vif_plug_linux_bridge_privileged.group = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.585799] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.585960] 
env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.586133] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.586290] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] vif_plug_linux_bridge_privileged.user = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.586455] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.586610] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] vif_plug_ovs_privileged.group = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.586766] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] vif_plug_ovs_privileged.helper_command = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.586926] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.587094] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.587253] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] vif_plug_ovs_privileged.user = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.587419] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] os_vif_linux_bridge.flat_interface = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.587606] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.587771] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.587941] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.588124] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] 
os_vif_linux_bridge.iptables_top_regex = {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.588292] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.588456] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.588617] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] os_vif_linux_bridge.vlan_interface = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.588793] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.588960] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] os_vif_ovs.isolate_vif = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.589140] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.589305] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.589471] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.589638] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] os_vif_ovs.ovsdb_interface = native {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.589796] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] os_vif_ovs.per_port_bridge = False {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.589957] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] os_brick.lock_path = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.590130] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.590289] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] os_brick.wait_mpath_device_interval = 1 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.590453] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None 
None] privsep_osbrick.capabilities = [21] {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.590608] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] privsep_osbrick.group = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.590762] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] privsep_osbrick.helper_command = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.590923] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.591092] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] privsep_osbrick.thread_pool_size = 8 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.591249] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] privsep_osbrick.user = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.591418] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.591573] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] nova_sys_admin.group = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.591727] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] nova_sys_admin.helper_command = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.591887] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.592054] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] nova_sys_admin.thread_pool_size = 8 {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.592210] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] nova_sys_admin.user = None {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.592336] env[63345]: DEBUG oslo_service.service [None req-3c0a1717-caec-40ce-adfb-e5dc0338bccf None None] ******************************************************************************** {{(pid=63345) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2830}} [ 467.592816] env[63345]: INFO nova.service [-] Starting compute node (version 30.1.0) [ 468.095588] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-23cee962-ebe9-428a-b610-a1b245477f8e None None] Getting list of instances from cluster (obj){ [ 468.095588] env[63345]: value = "domain-c8" [ 468.095588] env[63345]: 
_type = "ClusterComputeResource" [ 468.095588] env[63345]: } {{(pid=63345) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 468.096752] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41ce7456-1cdc-4273-829c-24c28a6d0dd2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 468.106154] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-23cee962-ebe9-428a-b610-a1b245477f8e None None] Got total of 0 instances {{(pid=63345) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 468.106694] env[63345]: WARNING nova.virt.vmwareapi.driver [None req-23cee962-ebe9-428a-b610-a1b245477f8e None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 468.107179] env[63345]: INFO nova.virt.node [None req-23cee962-ebe9-428a-b610-a1b245477f8e None None] Generated node identity fc35ddde-c15e-4ab8-bf77-a06ae0805b57 [ 468.107488] env[63345]: INFO nova.virt.node [None req-23cee962-ebe9-428a-b610-a1b245477f8e None None] Wrote node identity fc35ddde-c15e-4ab8-bf77-a06ae0805b57 to /opt/stack/data/n-cpu-1/compute_id [ 468.610571] env[63345]: WARNING nova.compute.manager [None req-23cee962-ebe9-428a-b610-a1b245477f8e None None] Compute nodes ['fc35ddde-c15e-4ab8-bf77-a06ae0805b57'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 469.616231] env[63345]: INFO nova.compute.manager [None req-23cee962-ebe9-428a-b610-a1b245477f8e None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 470.621820] env[63345]: WARNING nova.compute.manager [None req-23cee962-ebe9-428a-b610-a1b245477f8e None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. 
[ 470.622207] env[63345]: DEBUG oslo_concurrency.lockutils [None req-23cee962-ebe9-428a-b610-a1b245477f8e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 470.622326] env[63345]: DEBUG oslo_concurrency.lockutils [None req-23cee962-ebe9-428a-b610-a1b245477f8e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 470.622457] env[63345]: DEBUG oslo_concurrency.lockutils [None req-23cee962-ebe9-428a-b610-a1b245477f8e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 470.622614] env[63345]: DEBUG nova.compute.resource_tracker [None req-23cee962-ebe9-428a-b610-a1b245477f8e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63345) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 470.623535] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b50a5169-9047-4e63-9891-7437db5bf3ec {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 470.632140] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea3c61b9-6c35-4ee7-9270-1b8d770fc021 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 470.646872] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e46fa37-100b-4670-9ce4-cd32bd1bfa7c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 470.653222] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4af817b6-c670-446a-ad13-3164a5f1b427 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 470.683584] env[63345]: DEBUG nova.compute.resource_tracker [None req-23cee962-ebe9-428a-b610-a1b245477f8e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181583MB free_disk=188GB free_vcpus=48 pci_devices=None {{(pid=63345) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 470.683782] env[63345]: DEBUG oslo_concurrency.lockutils [None req-23cee962-ebe9-428a-b610-a1b245477f8e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 470.683919] env[63345]: DEBUG oslo_concurrency.lockutils [None req-23cee962-ebe9-428a-b610-a1b245477f8e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 471.187690] env[63345]: WARNING 
nova.compute.resource_tracker [None req-23cee962-ebe9-428a-b610-a1b245477f8e None None] No compute node record for cpu-1:fc35ddde-c15e-4ab8-bf77-a06ae0805b57: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host fc35ddde-c15e-4ab8-bf77-a06ae0805b57 could not be found. [ 471.691560] env[63345]: INFO nova.compute.resource_tracker [None req-23cee962-ebe9-428a-b610-a1b245477f8e None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 [ 473.199825] env[63345]: DEBUG nova.compute.resource_tracker [None req-23cee962-ebe9-428a-b610-a1b245477f8e None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63345) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 473.200158] env[63345]: DEBUG nova.compute.resource_tracker [None req-23cee962-ebe9-428a-b610-a1b245477f8e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63345) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 473.348859] env[63345]: INFO nova.scheduler.client.report [None req-23cee962-ebe9-428a-b610-a1b245477f8e None None] [req-03b60743-19a5-4969-982f-bdf932379460] Created resource provider record via placement API for resource provider with UUID fc35ddde-c15e-4ab8-bf77-a06ae0805b57 and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. [ 473.365046] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be4084bc-03ea-47e8-8f03-bdfaa073bc8f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 473.372771] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c9ad76c-de7c-4117-b588-202911d6ff83 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 473.402914] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-261b23da-bf9d-4128-8dcc-410110eaa70d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 473.410398] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a750f1e1-a3a4-41cd-b79b-8876f4a4ccf8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 473.423405] env[63345]: DEBUG nova.compute.provider_tree [None req-23cee962-ebe9-428a-b610-a1b245477f8e None None] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 188, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 473.956956] env[63345]: DEBUG nova.scheduler.client.report [None req-23cee962-ebe9-428a-b610-a1b245477f8e None None] Updated inventory for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 
'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 188, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 473.957203] env[63345]: DEBUG nova.compute.provider_tree [None req-23cee962-ebe9-428a-b610-a1b245477f8e None None] Updating resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 generation from 0 to 1 during operation: update_inventory {{(pid=63345) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 473.957342] env[63345]: DEBUG nova.compute.provider_tree [None req-23cee962-ebe9-428a-b610-a1b245477f8e None None] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 188, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 474.012584] env[63345]: DEBUG nova.compute.provider_tree [None req-23cee962-ebe9-428a-b610-a1b245477f8e None None] Updating resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 generation from 1 to 2 during operation: update_traits {{(pid=63345) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 474.517648] env[63345]: DEBUG nova.compute.resource_tracker [None req-23cee962-ebe9-428a-b610-a1b245477f8e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63345) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 474.518050] env[63345]: DEBUG oslo_concurrency.lockutils [None req-23cee962-ebe9-428a-b610-a1b245477f8e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.834s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 474.518092] env[63345]: DEBUG nova.service [None req-23cee962-ebe9-428a-b610-a1b245477f8e None None] Creating RPC server for service compute {{(pid=63345) start /opt/stack/nova/nova/service.py:186}} [ 474.531051] env[63345]: DEBUG nova.service [None req-23cee962-ebe9-428a-b610-a1b245477f8e None None] Join ServiceGroup membership for this service compute {{(pid=63345) start /opt/stack/nova/nova/service.py:203}} [ 474.531230] env[63345]: DEBUG nova.servicegroup.drivers.db [None req-23cee962-ebe9-428a-b610-a1b245477f8e None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=63345) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 507.883114] env[63345]: DEBUG oslo_concurrency.lockutils [None req-01122426-3960-4bc5-9640-79ecd48623c9 tempest-TenantUsagesTestJSON-1887303192 tempest-TenantUsagesTestJSON-1887303192-project-member] Acquiring lock "d35db4e4-b25c-4811-a93f-cd337f6f9142" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 507.883114] env[63345]: DEBUG 
oslo_concurrency.lockutils [None req-01122426-3960-4bc5-9640-79ecd48623c9 tempest-TenantUsagesTestJSON-1887303192 tempest-TenantUsagesTestJSON-1887303192-project-member] Lock "d35db4e4-b25c-4811-a93f-cd337f6f9142" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 508.387644] env[63345]: DEBUG nova.compute.manager [None req-01122426-3960-4bc5-9640-79ecd48623c9 tempest-TenantUsagesTestJSON-1887303192 tempest-TenantUsagesTestJSON-1887303192-project-member] [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 508.927478] env[63345]: DEBUG oslo_concurrency.lockutils [None req-01122426-3960-4bc5-9640-79ecd48623c9 tempest-TenantUsagesTestJSON-1887303192 tempest-TenantUsagesTestJSON-1887303192-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 508.927742] env[63345]: DEBUG oslo_concurrency.lockutils [None req-01122426-3960-4bc5-9640-79ecd48623c9 tempest-TenantUsagesTestJSON-1887303192 tempest-TenantUsagesTestJSON-1887303192-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 508.929362] env[63345]: INFO nova.compute.claims [None req-01122426-3960-4bc5-9640-79ecd48623c9 tempest-TenantUsagesTestJSON-1887303192 tempest-TenantUsagesTestJSON-1887303192-project-member] [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 509.431601] env[63345]: DEBUG oslo_concurrency.lockutils [None req-358d58e2-d3f0-40f3-a40e-86b8333d3d19 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Acquiring lock "a6858a79-06b8-4110-9da4-e0e2a4a4e830" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 509.431900] env[63345]: DEBUG oslo_concurrency.lockutils [None req-358d58e2-d3f0-40f3-a40e-86b8333d3d19 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Lock "a6858a79-06b8-4110-9da4-e0e2a4a4e830" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 509.936570] env[63345]: DEBUG nova.compute.manager [None req-358d58e2-d3f0-40f3-a40e-86b8333d3d19 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] Starting instance... 
{{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 510.004913] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3ec90d5-8e86-4287-9c75-a80a157f7742 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 510.015023] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fae540c0-b703-495b-9b95-934135e61301 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 510.049682] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca653ccc-0199-4570-8166-5eb2094c5522 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 510.057691] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75d919b1-0a36-446a-bf3a-210086cbf000 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 510.073099] env[63345]: DEBUG nova.compute.provider_tree [None req-01122426-3960-4bc5-9640-79ecd48623c9 tempest-TenantUsagesTestJSON-1887303192 tempest-TenantUsagesTestJSON-1887303192-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 510.475876] env[63345]: DEBUG oslo_concurrency.lockutils [None req-358d58e2-d3f0-40f3-a40e-86b8333d3d19 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 510.577863] env[63345]: DEBUG nova.scheduler.client.report [None req-01122426-3960-4bc5-9640-79ecd48623c9 tempest-TenantUsagesTestJSON-1887303192 tempest-TenantUsagesTestJSON-1887303192-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 188, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 511.086227] env[63345]: DEBUG oslo_concurrency.lockutils [None req-01122426-3960-4bc5-9640-79ecd48623c9 tempest-TenantUsagesTestJSON-1887303192 tempest-TenantUsagesTestJSON-1887303192-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.155s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 511.086227] env[63345]: DEBUG nova.compute.manager [None req-01122426-3960-4bc5-9640-79ecd48623c9 tempest-TenantUsagesTestJSON-1887303192 tempest-TenantUsagesTestJSON-1887303192-project-member] [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] Start building networks asynchronously for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 511.090652] env[63345]: DEBUG oslo_concurrency.lockutils [None req-358d58e2-d3f0-40f3-a40e-86b8333d3d19 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.616s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 511.092127] env[63345]: INFO nova.compute.claims [None req-358d58e2-d3f0-40f3-a40e-86b8333d3d19 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 511.593027] env[63345]: DEBUG nova.compute.utils [None req-01122426-3960-4bc5-9640-79ecd48623c9 tempest-TenantUsagesTestJSON-1887303192 tempest-TenantUsagesTestJSON-1887303192-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 511.594931] env[63345]: DEBUG nova.compute.manager [None req-01122426-3960-4bc5-9640-79ecd48623c9 tempest-TenantUsagesTestJSON-1887303192 tempest-TenantUsagesTestJSON-1887303192-project-member] [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] Allocating IP information in the background. {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 511.595310] env[63345]: DEBUG nova.network.neutron [None req-01122426-3960-4bc5-9640-79ecd48623c9 tempest-TenantUsagesTestJSON-1887303192 tempest-TenantUsagesTestJSON-1887303192-project-member] [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 511.897647] env[63345]: DEBUG nova.policy [None req-01122426-3960-4bc5-9640-79ecd48623c9 tempest-TenantUsagesTestJSON-1887303192 tempest-TenantUsagesTestJSON-1887303192-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '861bf9930a634c58a3f941b74eac22ff', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5cbd9485f2cb44cba7674b00d33692c2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 512.109595] env[63345]: DEBUG nova.compute.manager [None req-01122426-3960-4bc5-9640-79ecd48623c9 tempest-TenantUsagesTestJSON-1887303192 tempest-TenantUsagesTestJSON-1887303192-project-member] [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] Start building block device mappings for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 512.184627] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-168f76a2-ab7e-49b3-9caa-5ab589907521 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 512.192309] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1eceed31-d1f2-4660-81db-9a008dd46ab1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 512.229099] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-359c344a-167e-4dd8-af39-21c3d5495ff9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 512.234192] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7acc3fb7-b754-4f35-9d7f-520cb53bf911 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 512.249539] env[63345]: DEBUG nova.compute.provider_tree [None req-358d58e2-d3f0-40f3-a40e-86b8333d3d19 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 512.478542] env[63345]: DEBUG nova.network.neutron [None req-01122426-3960-4bc5-9640-79ecd48623c9 tempest-TenantUsagesTestJSON-1887303192 tempest-TenantUsagesTestJSON-1887303192-project-member] [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] Successfully created port: b116df82-1473-4a0c-9d2c-585e5c778551 {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 512.728015] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Acquiring lock "51d6db80-9d1f-4e38-a564-f587474f6294" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 512.728015] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Lock "51d6db80-9d1f-4e38-a564-f587474f6294" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 512.751362] env[63345]: DEBUG nova.scheduler.client.report [None req-358d58e2-d3f0-40f3-a40e-86b8333d3d19 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 188, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 513.129260] env[63345]: DEBUG nova.compute.manager [None req-01122426-3960-4bc5-9640-79ecd48623c9 tempest-TenantUsagesTestJSON-1887303192 tempest-TenantUsagesTestJSON-1887303192-project-member] [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] Start spawning the instance on the hypervisor. {{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 513.159604] env[63345]: DEBUG nova.virt.hardware [None req-01122426-3960-4bc5-9640-79ecd48623c9 tempest-TenantUsagesTestJSON-1887303192 tempest-TenantUsagesTestJSON-1887303192-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 513.159848] env[63345]: DEBUG nova.virt.hardware [None req-01122426-3960-4bc5-9640-79ecd48623c9 tempest-TenantUsagesTestJSON-1887303192 tempest-TenantUsagesTestJSON-1887303192-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 513.160150] env[63345]: DEBUG nova.virt.hardware [None req-01122426-3960-4bc5-9640-79ecd48623c9 tempest-TenantUsagesTestJSON-1887303192 tempest-TenantUsagesTestJSON-1887303192-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 513.160207] env[63345]: DEBUG nova.virt.hardware [None req-01122426-3960-4bc5-9640-79ecd48623c9 tempest-TenantUsagesTestJSON-1887303192 tempest-TenantUsagesTestJSON-1887303192-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 513.160437] env[63345]: DEBUG nova.virt.hardware [None req-01122426-3960-4bc5-9640-79ecd48623c9 tempest-TenantUsagesTestJSON-1887303192 tempest-TenantUsagesTestJSON-1887303192-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 513.160527] env[63345]: DEBUG nova.virt.hardware [None req-01122426-3960-4bc5-9640-79ecd48623c9 tempest-TenantUsagesTestJSON-1887303192 tempest-TenantUsagesTestJSON-1887303192-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 513.160675] env[63345]: DEBUG nova.virt.hardware [None req-01122426-3960-4bc5-9640-79ecd48623c9 tempest-TenantUsagesTestJSON-1887303192 tempest-TenantUsagesTestJSON-1887303192-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 513.161139] env[63345]: DEBUG 
nova.virt.hardware [None req-01122426-3960-4bc5-9640-79ecd48623c9 tempest-TenantUsagesTestJSON-1887303192 tempest-TenantUsagesTestJSON-1887303192-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 513.162133] env[63345]: DEBUG nova.virt.hardware [None req-01122426-3960-4bc5-9640-79ecd48623c9 tempest-TenantUsagesTestJSON-1887303192 tempest-TenantUsagesTestJSON-1887303192-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 513.162391] env[63345]: DEBUG nova.virt.hardware [None req-01122426-3960-4bc5-9640-79ecd48623c9 tempest-TenantUsagesTestJSON-1887303192 tempest-TenantUsagesTestJSON-1887303192-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 513.162529] env[63345]: DEBUG nova.virt.hardware [None req-01122426-3960-4bc5-9640-79ecd48623c9 tempest-TenantUsagesTestJSON-1887303192 tempest-TenantUsagesTestJSON-1887303192-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 513.164044] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6a428c2-89e6-4fbe-b8d1-4629edd806cf {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 513.173581] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2d3f89a-47b4-4c6d-9600-566becd88613 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 513.184337] env[63345]: DEBUG oslo_concurrency.lockutils [None req-04fcf833-0c89-4319-90a1-d11e5e77b068 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] Acquiring lock "aa21e116-3bf1-4574-8d4f-d0a1af692e8b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 513.184668] env[63345]: DEBUG oslo_concurrency.lockutils [None req-04fcf833-0c89-4319-90a1-d11e5e77b068 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] Lock "aa21e116-3bf1-4574-8d4f-d0a1af692e8b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 513.200476] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04636b89-8551-4c0a-8137-1803dd7e3880 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 513.230754] env[63345]: DEBUG nova.compute.manager [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] [instance: 51d6db80-9d1f-4e38-a564-f587474f6294] Starting instance... 
{{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 513.257876] env[63345]: DEBUG oslo_concurrency.lockutils [None req-358d58e2-d3f0-40f3-a40e-86b8333d3d19 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.167s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 513.258441] env[63345]: DEBUG nova.compute.manager [None req-358d58e2-d3f0-40f3-a40e-86b8333d3d19 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] Start building networks asynchronously for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 513.698613] env[63345]: DEBUG nova.compute.manager [None req-04fcf833-0c89-4319-90a1-d11e5e77b068 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 513.764195] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 513.765458] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 513.767544] env[63345]: INFO nova.compute.claims [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] [instance: 51d6db80-9d1f-4e38-a564-f587474f6294] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 513.770845] env[63345]: DEBUG nova.compute.utils [None req-358d58e2-d3f0-40f3-a40e-86b8333d3d19 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 513.780619] env[63345]: DEBUG nova.compute.manager [None req-358d58e2-d3f0-40f3-a40e-86b8333d3d19 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] Allocating IP information in the background. 
{{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 513.780619] env[63345]: DEBUG nova.network.neutron [None req-358d58e2-d3f0-40f3-a40e-86b8333d3d19 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 513.918780] env[63345]: DEBUG nova.policy [None req-358d58e2-d3f0-40f3-a40e-86b8333d3d19 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6055500166344214a404427722503338', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dfc1248fb5ee4f798b6c59154d4cf623', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 514.135939] env[63345]: DEBUG oslo_concurrency.lockutils [None req-092e8cc7-bca1-43ff-be18-4b9d654c5cad tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Acquiring lock "f4e897ce-2df5-40ae-99a8-11cac4902588" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 514.136410] env[63345]: DEBUG oslo_concurrency.lockutils [None req-092e8cc7-bca1-43ff-be18-4b9d654c5cad tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Lock "f4e897ce-2df5-40ae-99a8-11cac4902588" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 514.226208] env[63345]: DEBUG oslo_concurrency.lockutils [None req-04fcf833-0c89-4319-90a1-d11e5e77b068 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 514.282314] env[63345]: DEBUG nova.compute.manager [None req-358d58e2-d3f0-40f3-a40e-86b8333d3d19 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] Start building block device mappings for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 514.643315] env[63345]: DEBUG nova.compute.manager [None req-092e8cc7-bca1-43ff-be18-4b9d654c5cad tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] Starting instance... 
{{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 514.723345] env[63345]: DEBUG nova.network.neutron [None req-358d58e2-d3f0-40f3-a40e-86b8333d3d19 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] Successfully created port: 1bb7a991-b363-43c0-8650-31586ccda3de {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 514.915231] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-771246ce-3d87-42ae-9fbf-d68ca2caad05 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 514.922837] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ec552ac-8ec3-4a19-8d84-7396699027ee {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 514.955063] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9d7e830-ccda-4f23-9a15-27d608fa1a03 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 514.964276] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b674f63-ab8e-4462-aab9-9ef2b6dd0158 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 514.981250] env[63345]: DEBUG nova.compute.provider_tree [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 515.172218] env[63345]: DEBUG oslo_concurrency.lockutils [None req-092e8cc7-bca1-43ff-be18-4b9d654c5cad tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 515.301183] env[63345]: DEBUG nova.compute.manager [None req-358d58e2-d3f0-40f3-a40e-86b8333d3d19 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] Start spawning the instance on the hypervisor. 
{{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 515.345310] env[63345]: DEBUG nova.virt.hardware [None req-358d58e2-d3f0-40f3-a40e-86b8333d3d19 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 515.345310] env[63345]: DEBUG nova.virt.hardware [None req-358d58e2-d3f0-40f3-a40e-86b8333d3d19 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 515.345310] env[63345]: DEBUG nova.virt.hardware [None req-358d58e2-d3f0-40f3-a40e-86b8333d3d19 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 515.346368] env[63345]: DEBUG nova.virt.hardware [None req-358d58e2-d3f0-40f3-a40e-86b8333d3d19 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 515.346368] env[63345]: DEBUG nova.virt.hardware [None req-358d58e2-d3f0-40f3-a40e-86b8333d3d19 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 515.346368] env[63345]: DEBUG nova.virt.hardware [None req-358d58e2-d3f0-40f3-a40e-86b8333d3d19 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 515.346368] env[63345]: DEBUG nova.virt.hardware [None req-358d58e2-d3f0-40f3-a40e-86b8333d3d19 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 515.346572] env[63345]: DEBUG nova.virt.hardware [None req-358d58e2-d3f0-40f3-a40e-86b8333d3d19 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 515.347034] env[63345]: DEBUG nova.virt.hardware [None req-358d58e2-d3f0-40f3-a40e-86b8333d3d19 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Got 1 possible 
topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 515.347034] env[63345]: DEBUG nova.virt.hardware [None req-358d58e2-d3f0-40f3-a40e-86b8333d3d19 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 515.347529] env[63345]: DEBUG nova.virt.hardware [None req-358d58e2-d3f0-40f3-a40e-86b8333d3d19 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 515.349029] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8875a2a3-61fd-42ee-88c5-822da2c9f42b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 515.359410] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-435f8eb9-827a-45da-8f2b-5a5d6802cd28 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 515.464596] env[63345]: ERROR nova.compute.manager [None req-01122426-3960-4bc5-9640-79ecd48623c9 tempest-TenantUsagesTestJSON-1887303192 tempest-TenantUsagesTestJSON-1887303192-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port b116df82-1473-4a0c-9d2c-585e5c778551, please check neutron logs for more information. [ 515.464596] env[63345]: ERROR nova.compute.manager Traceback (most recent call last): [ 515.464596] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 515.464596] env[63345]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 515.464596] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 515.464596] env[63345]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 515.464596] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 515.464596] env[63345]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 515.464596] env[63345]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 515.464596] env[63345]: ERROR nova.compute.manager self.force_reraise() [ 515.464596] env[63345]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 515.464596] env[63345]: ERROR nova.compute.manager raise self.value [ 515.464596] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 515.464596] env[63345]: ERROR nova.compute.manager updated_port = self._update_port( [ 515.464596] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 515.464596] env[63345]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 515.465046] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 515.465046] env[63345]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 515.465046] env[63345]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port b116df82-1473-4a0c-9d2c-585e5c778551, please check neutron logs for more information. [ 515.465046] env[63345]: ERROR nova.compute.manager [ 515.465046] env[63345]: Traceback (most recent call last): [ 515.465046] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 515.465046] env[63345]: listener.cb(fileno) [ 515.465046] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 515.465046] env[63345]: result = function(*args, **kwargs) [ 515.465046] env[63345]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 515.465046] env[63345]: return func(*args, **kwargs) [ 515.465046] env[63345]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 515.465046] env[63345]: raise e [ 515.465046] env[63345]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 515.465046] env[63345]: nwinfo = self.network_api.allocate_for_instance( [ 515.465046] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 515.465046] env[63345]: created_port_ids = self._update_ports_for_instance( [ 515.465046] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 515.465046] env[63345]: with excutils.save_and_reraise_exception(): [ 515.465046] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 515.465046] env[63345]: self.force_reraise() [ 515.465046] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 515.465046] env[63345]: raise self.value [ 515.465046] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 515.465046] env[63345]: updated_port = self._update_port( [ 515.465046] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 515.465046] env[63345]: _ensure_no_port_binding_failure(port) [ 515.465046] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 515.465046] env[63345]: raise exception.PortBindingFailed(port_id=port['id']) [ 515.467144] env[63345]: nova.exception.PortBindingFailed: Binding failed for port b116df82-1473-4a0c-9d2c-585e5c778551, please check neutron logs for more information. [ 515.467144] env[63345]: Removing descriptor: 15 [ 515.467144] env[63345]: ERROR nova.compute.manager [None req-01122426-3960-4bc5-9640-79ecd48623c9 tempest-TenantUsagesTestJSON-1887303192 tempest-TenantUsagesTestJSON-1887303192-project-member] [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port b116df82-1473-4a0c-9d2c-585e5c778551, please check neutron logs for more information. 
[ 515.467144] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] Traceback (most recent call last): [ 515.467144] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 515.467144] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] yield resources [ 515.467144] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 515.467144] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] self.driver.spawn(context, instance, image_meta, [ 515.467144] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 515.467144] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] self._vmops.spawn(context, instance, image_meta, injected_files, [ 515.467144] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 515.467144] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] vm_ref = self.build_virtual_machine(instance, [ 515.467544] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 515.467544] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] vif_infos = vmwarevif.get_vif_info(self._session, [ 515.467544] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 515.467544] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] for vif in network_info: [ 515.467544] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 515.467544] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] return self._sync_wrapper(fn, *args, **kwargs) [ 515.467544] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 515.467544] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] self.wait() [ 515.467544] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 515.467544] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] self[:] = self._gt.wait() [ 515.467544] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 515.467544] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] return self._exit_event.wait() [ 515.467544] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 515.467884] env[63345]: ERROR 
nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] result = hub.switch() [ 515.467884] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 515.467884] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] return self.greenlet.switch() [ 515.467884] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 515.467884] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] result = function(*args, **kwargs) [ 515.467884] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 515.467884] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] return func(*args, **kwargs) [ 515.467884] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 515.467884] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] raise e [ 515.467884] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 515.467884] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] nwinfo = self.network_api.allocate_for_instance( [ 515.467884] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 515.467884] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] created_port_ids = self._update_ports_for_instance( [ 515.468553] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 515.468553] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] with excutils.save_and_reraise_exception(): [ 515.468553] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 515.468553] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] self.force_reraise() [ 515.468553] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 515.468553] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] raise self.value [ 515.468553] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 515.468553] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] updated_port = self._update_port( [ 515.468553] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 515.468553] 
env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] _ensure_no_port_binding_failure(port) [ 515.468553] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 515.468553] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] raise exception.PortBindingFailed(port_id=port['id']) [ 515.468899] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] nova.exception.PortBindingFailed: Binding failed for port b116df82-1473-4a0c-9d2c-585e5c778551, please check neutron logs for more information. [ 515.468899] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] [ 515.468899] env[63345]: INFO nova.compute.manager [None req-01122426-3960-4bc5-9640-79ecd48623c9 tempest-TenantUsagesTestJSON-1887303192 tempest-TenantUsagesTestJSON-1887303192-project-member] [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] Terminating instance [ 515.484242] env[63345]: DEBUG nova.scheduler.client.report [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 188, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 515.917500] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5ea571cd-8f6b-4257-8cc6-0ee9e2154c2b tempest-ServersTestFqdnHostnames-427992595 tempest-ServersTestFqdnHostnames-427992595-project-member] Acquiring lock "c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 515.917500] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5ea571cd-8f6b-4257-8cc6-0ee9e2154c2b tempest-ServersTestFqdnHostnames-427992595 tempest-ServersTestFqdnHostnames-427992595-project-member] Lock "c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.003s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 515.971073] env[63345]: DEBUG oslo_concurrency.lockutils [None req-01122426-3960-4bc5-9640-79ecd48623c9 tempest-TenantUsagesTestJSON-1887303192 tempest-TenantUsagesTestJSON-1887303192-project-member] Acquiring lock "refresh_cache-d35db4e4-b25c-4811-a93f-cd337f6f9142" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 515.971271] env[63345]: DEBUG oslo_concurrency.lockutils [None req-01122426-3960-4bc5-9640-79ecd48623c9 tempest-TenantUsagesTestJSON-1887303192 tempest-TenantUsagesTestJSON-1887303192-project-member] Acquired lock "refresh_cache-d35db4e4-b25c-4811-a93f-cd337f6f9142" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 
515.971453] env[63345]: DEBUG nova.network.neutron [None req-01122426-3960-4bc5-9640-79ecd48623c9 tempest-TenantUsagesTestJSON-1887303192 tempest-TenantUsagesTestJSON-1887303192-project-member] [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 515.996048] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.229s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 515.996048] env[63345]: DEBUG nova.compute.manager [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] [instance: 51d6db80-9d1f-4e38-a564-f587474f6294] Start building networks asynchronously for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 515.999713] env[63345]: DEBUG oslo_concurrency.lockutils [None req-04fcf833-0c89-4319-90a1-d11e5e77b068 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.771s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 515.999713] env[63345]: INFO nova.compute.claims [None req-04fcf833-0c89-4319-90a1-d11e5e77b068 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 516.275206] env[63345]: DEBUG oslo_concurrency.lockutils [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Acquiring lock "5ef55aca-0714-4b34-85f2-b6d53f97c2d0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 516.275206] env[63345]: DEBUG oslo_concurrency.lockutils [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Lock "5ef55aca-0714-4b34-85f2-b6d53f97c2d0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 516.423779] env[63345]: DEBUG nova.compute.manager [None req-5ea571cd-8f6b-4257-8cc6-0ee9e2154c2b tempest-ServersTestFqdnHostnames-427992595 tempest-ServersTestFqdnHostnames-427992595-project-member] [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] Starting instance... 
{{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 516.508137] env[63345]: DEBUG nova.compute.utils [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 516.512526] env[63345]: DEBUG nova.compute.manager [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] [instance: 51d6db80-9d1f-4e38-a564-f587474f6294] Not allocating networking since 'none' was specified. {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1983}} [ 516.535178] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._sync_power_states {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 516.565411] env[63345]: DEBUG nova.network.neutron [None req-01122426-3960-4bc5-9640-79ecd48623c9 tempest-TenantUsagesTestJSON-1887303192 tempest-TenantUsagesTestJSON-1887303192-project-member] [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 516.778632] env[63345]: DEBUG nova.compute.manager [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 516.798236] env[63345]: DEBUG nova.network.neutron [None req-01122426-3960-4bc5-9640-79ecd48623c9 tempest-TenantUsagesTestJSON-1887303192 tempest-TenantUsagesTestJSON-1887303192-project-member] [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 516.956780] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5ea571cd-8f6b-4257-8cc6-0ee9e2154c2b tempest-ServersTestFqdnHostnames-427992595 tempest-ServersTestFqdnHostnames-427992595-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 517.021627] env[63345]: DEBUG nova.compute.manager [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] [instance: 51d6db80-9d1f-4e38-a564-f587474f6294] Start building block device mappings for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 517.039671] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Getting list of instances from cluster (obj){ [ 517.039671] env[63345]: value = "domain-c8" [ 517.039671] env[63345]: _type = "ClusterComputeResource" [ 517.039671] env[63345]: } {{(pid=63345) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 517.043519] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa129767-dd5c-43c3-ad69-a4c7e6e5666b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 517.074881] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Got total of 0 instances {{(pid=63345) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 517.075081] env[63345]: WARNING nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] While synchronizing instance power states, found 4 instances in the database and 0 instances on the hypervisor. [ 517.075396] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Triggering sync for uuid d35db4e4-b25c-4811-a93f-cd337f6f9142 {{(pid=63345) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10667}} [ 517.075941] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Triggering sync for uuid a6858a79-06b8-4110-9da4-e0e2a4a4e830 {{(pid=63345) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10667}} [ 517.075941] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Triggering sync for uuid 51d6db80-9d1f-4e38-a564-f587474f6294 {{(pid=63345) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10667}} [ 517.076065] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Triggering sync for uuid aa21e116-3bf1-4574-8d4f-d0a1af692e8b {{(pid=63345) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10667}} [ 517.076513] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Acquiring lock "d35db4e4-b25c-4811-a93f-cd337f6f9142" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 517.076746] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Acquiring lock "a6858a79-06b8-4110-9da4-e0e2a4a4e830" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 517.077032] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Acquiring lock "51d6db80-9d1f-4e38-a564-f587474f6294" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 517.077311] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Acquiring lock "aa21e116-3bf1-4574-8d4f-d0a1af692e8b" by 
"nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 517.077499] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 517.077937] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Getting list of instances from cluster (obj){ [ 517.077937] env[63345]: value = "domain-c8" [ 517.077937] env[63345]: _type = "ClusterComputeResource" [ 517.077937] env[63345]: } {{(pid=63345) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 517.079085] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2ec746c-d297-4211-a224-ac1c86b9e3bd {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 517.090650] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Got total of 0 instances {{(pid=63345) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 517.146155] env[63345]: DEBUG nova.compute.manager [req-6d221955-5412-4e89-a210-87e4fb4e4fc0 req-882b6220-e080-4cef-8799-874c84c19fed service nova] [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] Received event network-changed-b116df82-1473-4a0c-9d2c-585e5c778551 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 517.147534] env[63345]: DEBUG nova.compute.manager [req-6d221955-5412-4e89-a210-87e4fb4e4fc0 req-882b6220-e080-4cef-8799-874c84c19fed service nova] [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] Refreshing instance network info cache due to event network-changed-b116df82-1473-4a0c-9d2c-585e5c778551. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 517.147906] env[63345]: DEBUG oslo_concurrency.lockutils [req-6d221955-5412-4e89-a210-87e4fb4e4fc0 req-882b6220-e080-4cef-8799-874c84c19fed service nova] Acquiring lock "refresh_cache-d35db4e4-b25c-4811-a93f-cd337f6f9142" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 517.199647] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a316abfa-1720-47b8-9128-69e57202d78e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 517.212925] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d348644-b1f9-4ea9-9079-14e187c5cf0c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 517.255741] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4565cf5f-4982-43a4-bf29-f83ce769ef16 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 517.265360] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fe27650-c93f-4fe7-8c84-b573371a127c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 517.286464] env[63345]: DEBUG nova.compute.provider_tree [None req-04fcf833-0c89-4319-90a1-d11e5e77b068 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 517.304382] env[63345]: DEBUG oslo_concurrency.lockutils [None req-01122426-3960-4bc5-9640-79ecd48623c9 tempest-TenantUsagesTestJSON-1887303192 tempest-TenantUsagesTestJSON-1887303192-project-member] Releasing lock "refresh_cache-d35db4e4-b25c-4811-a93f-cd337f6f9142" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 517.304872] env[63345]: DEBUG nova.compute.manager [None req-01122426-3960-4bc5-9640-79ecd48623c9 tempest-TenantUsagesTestJSON-1887303192 tempest-TenantUsagesTestJSON-1887303192-project-member] [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] Start destroying the instance on the hypervisor. 
{{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 517.305139] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-01122426-3960-4bc5-9640-79ecd48623c9 tempest-TenantUsagesTestJSON-1887303192 tempest-TenantUsagesTestJSON-1887303192-project-member] [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 517.305494] env[63345]: DEBUG oslo_concurrency.lockutils [req-6d221955-5412-4e89-a210-87e4fb4e4fc0 req-882b6220-e080-4cef-8799-874c84c19fed service nova] Acquired lock "refresh_cache-d35db4e4-b25c-4811-a93f-cd337f6f9142" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 517.305692] env[63345]: DEBUG nova.network.neutron [req-6d221955-5412-4e89-a210-87e4fb4e4fc0 req-882b6220-e080-4cef-8799-874c84c19fed service nova] [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] Refreshing network info cache for port b116df82-1473-4a0c-9d2c-585e5c778551 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 517.306940] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d19eea2b-2ec1-4d96-8d9a-8fe3375c033a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 517.318540] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f809b475-b88b-4590-b340-2a003019a557 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 517.331272] env[63345]: DEBUG oslo_concurrency.lockutils [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 517.346121] env[63345]: WARNING nova.virt.vmwareapi.vmops [None req-01122426-3960-4bc5-9640-79ecd48623c9 tempest-TenantUsagesTestJSON-1887303192 tempest-TenantUsagesTestJSON-1887303192-project-member] [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d35db4e4-b25c-4811-a93f-cd337f6f9142 could not be found. [ 517.346378] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-01122426-3960-4bc5-9640-79ecd48623c9 tempest-TenantUsagesTestJSON-1887303192 tempest-TenantUsagesTestJSON-1887303192-project-member] [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 517.347225] env[63345]: INFO nova.compute.manager [None req-01122426-3960-4bc5-9640-79ecd48623c9 tempest-TenantUsagesTestJSON-1887303192 tempest-TenantUsagesTestJSON-1887303192-project-member] [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] Took 0.04 seconds to destroy the instance on the hypervisor. [ 517.347435] env[63345]: DEBUG oslo.service.loopingcall [None req-01122426-3960-4bc5-9640-79ecd48623c9 tempest-TenantUsagesTestJSON-1887303192 tempest-TenantUsagesTestJSON-1887303192-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 517.347470] env[63345]: DEBUG nova.compute.manager [-] [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 517.347581] env[63345]: DEBUG nova.network.neutron [-] [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 517.420644] env[63345]: DEBUG nova.network.neutron [-] [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 517.616922] env[63345]: ERROR nova.compute.manager [None req-358d58e2-d3f0-40f3-a40e-86b8333d3d19 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 1bb7a991-b363-43c0-8650-31586ccda3de, please check neutron logs for more information. [ 517.616922] env[63345]: ERROR nova.compute.manager Traceback (most recent call last): [ 517.616922] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 517.616922] env[63345]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 517.616922] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 517.616922] env[63345]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 517.616922] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 517.616922] env[63345]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 517.616922] env[63345]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 517.616922] env[63345]: ERROR nova.compute.manager self.force_reraise() [ 517.616922] env[63345]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 517.616922] env[63345]: ERROR nova.compute.manager raise self.value [ 517.616922] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 517.616922] env[63345]: ERROR nova.compute.manager updated_port = self._update_port( [ 517.616922] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 517.616922] env[63345]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 517.617435] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 517.617435] env[63345]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 517.617435] env[63345]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 1bb7a991-b363-43c0-8650-31586ccda3de, please check neutron logs for more information. 
[ 517.617435] env[63345]: ERROR nova.compute.manager [ 517.617435] env[63345]: Traceback (most recent call last): [ 517.617435] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 517.617435] env[63345]: listener.cb(fileno) [ 517.617435] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 517.617435] env[63345]: result = function(*args, **kwargs) [ 517.617435] env[63345]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 517.617435] env[63345]: return func(*args, **kwargs) [ 517.617435] env[63345]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 517.617435] env[63345]: raise e [ 517.617435] env[63345]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 517.617435] env[63345]: nwinfo = self.network_api.allocate_for_instance( [ 517.617435] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 517.617435] env[63345]: created_port_ids = self._update_ports_for_instance( [ 517.617435] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 517.617435] env[63345]: with excutils.save_and_reraise_exception(): [ 517.617435] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 517.617435] env[63345]: self.force_reraise() [ 517.617435] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 517.617435] env[63345]: raise self.value [ 517.617435] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 517.617435] env[63345]: updated_port = self._update_port( [ 517.617435] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 517.617435] env[63345]: _ensure_no_port_binding_failure(port) [ 517.617435] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 517.617435] env[63345]: raise exception.PortBindingFailed(port_id=port['id']) [ 517.618248] env[63345]: nova.exception.PortBindingFailed: Binding failed for port 1bb7a991-b363-43c0-8650-31586ccda3de, please check neutron logs for more information. [ 517.618248] env[63345]: Removing descriptor: 16 [ 517.618712] env[63345]: ERROR nova.compute.manager [None req-358d58e2-d3f0-40f3-a40e-86b8333d3d19 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 1bb7a991-b363-43c0-8650-31586ccda3de, please check neutron logs for more information. 
[ 517.618712] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] Traceback (most recent call last): [ 517.618712] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 517.618712] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] yield resources [ 517.618712] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 517.618712] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] self.driver.spawn(context, instance, image_meta, [ 517.618712] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 517.618712] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] self._vmops.spawn(context, instance, image_meta, injected_files, [ 517.618712] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 517.618712] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] vm_ref = self.build_virtual_machine(instance, [ 517.618712] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 517.619030] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] vif_infos = vmwarevif.get_vif_info(self._session, [ 517.619030] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 517.619030] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] for vif in network_info: [ 517.619030] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 517.619030] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] return self._sync_wrapper(fn, *args, **kwargs) [ 517.619030] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 517.619030] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] self.wait() [ 517.619030] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 517.619030] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] self[:] = self._gt.wait() [ 517.619030] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 517.619030] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] return self._exit_event.wait() [ 517.619030] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 517.619030] env[63345]: ERROR 
nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] result = hub.switch() [ 517.623412] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 517.623412] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] return self.greenlet.switch() [ 517.623412] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 517.623412] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] result = function(*args, **kwargs) [ 517.623412] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 517.623412] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] return func(*args, **kwargs) [ 517.623412] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 517.623412] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] raise e [ 517.623412] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 517.623412] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] nwinfo = self.network_api.allocate_for_instance( [ 517.623412] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 517.623412] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] created_port_ids = self._update_ports_for_instance( [ 517.623412] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 517.623730] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] with excutils.save_and_reraise_exception(): [ 517.623730] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 517.623730] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] self.force_reraise() [ 517.623730] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 517.623730] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] raise self.value [ 517.623730] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 517.623730] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] updated_port = self._update_port( [ 517.623730] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 517.623730] 
env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] _ensure_no_port_binding_failure(port) [ 517.623730] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 517.623730] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] raise exception.PortBindingFailed(port_id=port['id']) [ 517.623730] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] nova.exception.PortBindingFailed: Binding failed for port 1bb7a991-b363-43c0-8650-31586ccda3de, please check neutron logs for more information. [ 517.623730] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] [ 517.624045] env[63345]: INFO nova.compute.manager [None req-358d58e2-d3f0-40f3-a40e-86b8333d3d19 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] Terminating instance [ 517.792460] env[63345]: DEBUG nova.scheduler.client.report [None req-04fcf833-0c89-4319-90a1-d11e5e77b068 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 188, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 517.851173] env[63345]: DEBUG nova.network.neutron [req-6d221955-5412-4e89-a210-87e4fb4e4fc0 req-882b6220-e080-4cef-8799-874c84c19fed service nova] [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 517.925523] env[63345]: DEBUG nova.network.neutron [-] [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 517.982033] env[63345]: DEBUG nova.network.neutron [req-6d221955-5412-4e89-a210-87e4fb4e4fc0 req-882b6220-e080-4cef-8799-874c84c19fed service nova] [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 518.041359] env[63345]: DEBUG nova.compute.manager [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] [instance: 51d6db80-9d1f-4e38-a564-f587474f6294] Start spawning the instance on the hypervisor. 
{{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 518.085254] env[63345]: DEBUG nova.virt.hardware [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 518.085542] env[63345]: DEBUG nova.virt.hardware [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 518.086613] env[63345]: DEBUG nova.virt.hardware [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 518.087021] env[63345]: DEBUG nova.virt.hardware [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 518.087225] env[63345]: DEBUG nova.virt.hardware [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 518.087426] env[63345]: DEBUG nova.virt.hardware [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 518.087682] env[63345]: DEBUG nova.virt.hardware [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 518.087858] env[63345]: DEBUG nova.virt.hardware [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 
518.088136] env[63345]: DEBUG nova.virt.hardware [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 518.089064] env[63345]: DEBUG nova.virt.hardware [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 518.089064] env[63345]: DEBUG nova.virt.hardware [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 518.095125] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2679af2f-b876-4cb3-89bd-77225e4b6849 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 518.107981] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a2f4679-7962-4f1f-84e9-3d8c3e03cd56 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 518.128116] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] [instance: 51d6db80-9d1f-4e38-a564-f587474f6294] Instance VIF info [] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 518.137905] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Creating folder: OpenStack. Parent ref: group-v4. 
{{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 518.138512] env[63345]: DEBUG oslo_concurrency.lockutils [None req-358d58e2-d3f0-40f3-a40e-86b8333d3d19 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Acquiring lock "refresh_cache-a6858a79-06b8-4110-9da4-e0e2a4a4e830" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 518.138686] env[63345]: DEBUG oslo_concurrency.lockutils [None req-358d58e2-d3f0-40f3-a40e-86b8333d3d19 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Acquired lock "refresh_cache-a6858a79-06b8-4110-9da4-e0e2a4a4e830" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 518.138878] env[63345]: DEBUG nova.network.neutron [None req-358d58e2-d3f0-40f3-a40e-86b8333d3d19 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 518.140465] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-16e18e81-b919-48a6-8e6f-89f594e4eac8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 518.154852] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Created folder: OpenStack in parent group-v4. [ 518.155170] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Creating folder: Project (b39cc4a9bf0443b4aa74d68fc8dfb13b). Parent ref: group-v225918. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 518.155602] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-026c9a59-a052-4a14-b4a6-200a7c9edee7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 518.168823] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Created folder: Project (b39cc4a9bf0443b4aa74d68fc8dfb13b) in parent group-v225918. [ 518.169182] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Creating folder: Instances. Parent ref: group-v225919. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 518.169525] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-572f90cb-fec2-4cdf-b3ef-8d22458f7c65 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 518.181355] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Created folder: Instances in parent group-v225919. 
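A minimal sketch of the CPU-topology enumeration reported in the nova.virt.hardware lines above, assuming a simplified model rather than Nova's actual _get_possible_cpu_topologies code: with no flavor or image limits the bounds default to 65536, and for the 1-vCPU m1.nano flavor only (sockets=1, cores=1, threads=1) satisfies sockets * cores * threads == vcpus, which is why the log reports a single possible topology.

# Illustrative sketch (not Nova's code): enumerate candidate CPU topologies
# for a flavor with `vcpus` CPUs; 65536 is the default bound when neither
# flavor nor image sets an explicit limit, as in the log lines above.
import itertools
from collections import namedtuple

VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

MAX_DEFAULT = 65536


def possible_topologies(vcpus, max_sockets=MAX_DEFAULT,
                        max_cores=MAX_DEFAULT, max_threads=MAX_DEFAULT):
    """Yield every (sockets, cores, threads) split whose product == vcpus."""
    for sockets, cores, threads in itertools.product(
            range(1, min(vcpus, max_sockets) + 1),
            range(1, min(vcpus, max_cores) + 1),
            range(1, min(vcpus, max_threads) + 1)):
        if sockets * cores * threads == vcpus:
            yield VirtCPUTopology(sockets, cores, threads)


# For a 1-vCPU flavor there is exactly one candidate, matching
# "Got 1 possible topologies ... (cores=1,sockets=1,threads=1)".
print(list(possible_topologies(1)))       # [VirtCPUTopology(sockets=1, cores=1, threads=1)]
print(len(list(possible_topologies(4))))  # several splits exist for 4 vCPUs, e.g. 1*2*2, 1*1*4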
[ 518.181737] env[63345]: DEBUG oslo.service.loopingcall [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 518.182050] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 51d6db80-9d1f-4e38-a564-f587474f6294] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 518.183034] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-544f84f0-aa94-419f-a265-9e9df9d39ce7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 518.217220] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 518.217220] env[63345]: value = "task-1016612" [ 518.217220] env[63345]: _type = "Task" [ 518.217220] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 518.226425] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016612, 'name': CreateVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 518.298755] env[63345]: DEBUG oslo_concurrency.lockutils [None req-04fcf833-0c89-4319-90a1-d11e5e77b068 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.302s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 518.299875] env[63345]: DEBUG nova.compute.manager [None req-04fcf833-0c89-4319-90a1-d11e5e77b068 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] Start building networks asynchronously for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 518.303650] env[63345]: DEBUG oslo_concurrency.lockutils [None req-092e8cc7-bca1-43ff-be18-4b9d654c5cad tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.130s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 518.305406] env[63345]: INFO nova.compute.claims [None req-092e8cc7-bca1-43ff-be18-4b9d654c5cad tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 518.431064] env[63345]: INFO nova.compute.manager [-] [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] Took 1.08 seconds to deallocate network for instance. 
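The CreateVM_Task lines above ("Waiting for the task ... progress is 0%") follow the usual submit-then-poll pattern. A minimal sketch of that polling loop, assuming a generic poll_task_info callable and simplified task states rather than oslo.vmware's real API:

# Illustrative sketch (not oslo.vmware's implementation): submit a vCenter
# task, then poll its state until it succeeds, errors out, or times out.
import time


class TaskFailed(Exception):
    pass


def wait_for_task(poll_task_info, task_ref, interval=0.5, timeout=300):
    """Poll `poll_task_info(task_ref)` until the task finishes.

    `poll_task_info` is a stand-in for whatever returns the task's current
    state and progress, e.g. a dict like {'state': 'running', 'progress': 40}.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = poll_task_info(task_ref)   # progress is 0%, 40%, ... in the log
        if info["state"] == "success":
            return info.get("result")
        if info["state"] == "error":
            raise TaskFailed(info.get("error", "task %s failed" % task_ref))
        time.sleep(interval)              # still queued/running: wait and re-poll
    raise TimeoutError("task %s did not complete in %ss" % (task_ref, timeout))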
[ 518.434396] env[63345]: DEBUG nova.compute.claims [None req-01122426-3960-4bc5-9640-79ecd48623c9 tempest-TenantUsagesTestJSON-1887303192 tempest-TenantUsagesTestJSON-1887303192-project-member] [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] Aborting claim: {{(pid=63345) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 518.434637] env[63345]: DEBUG oslo_concurrency.lockutils [None req-01122426-3960-4bc5-9640-79ecd48623c9 tempest-TenantUsagesTestJSON-1887303192 tempest-TenantUsagesTestJSON-1887303192-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 518.487348] env[63345]: DEBUG oslo_concurrency.lockutils [req-6d221955-5412-4e89-a210-87e4fb4e4fc0 req-882b6220-e080-4cef-8799-874c84c19fed service nova] Releasing lock "refresh_cache-d35db4e4-b25c-4811-a93f-cd337f6f9142" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 518.660460] env[63345]: DEBUG nova.network.neutron [None req-358d58e2-d3f0-40f3-a40e-86b8333d3d19 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 518.732717] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016612, 'name': CreateVM_Task, 'duration_secs': 0.337817} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 518.734562] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 51d6db80-9d1f-4e38-a564-f587474f6294] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 518.736724] env[63345]: DEBUG oslo_vmware.service [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b1ba06d-af76-4a2c-b9f8-5a1596fdaecb {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 518.748385] env[63345]: DEBUG nova.network.neutron [None req-358d58e2-d3f0-40f3-a40e-86b8333d3d19 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 518.755339] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 518.755339] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 518.755339] env[63345]: DEBUG 
oslo_concurrency.lockutils [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 518.755339] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f6d611ca-d03f-40d2-96fe-80f9be2a0f2b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 518.759444] env[63345]: DEBUG oslo_vmware.api [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Waiting for the task: (returnval){ [ 518.759444] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]523a5fdd-ce8b-64ef-c5f9-c9031436e5f0" [ 518.759444] env[63345]: _type = "Task" [ 518.759444] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 518.771670] env[63345]: DEBUG oslo_vmware.api [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]523a5fdd-ce8b-64ef-c5f9-c9031436e5f0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 518.812384] env[63345]: DEBUG nova.compute.utils [None req-04fcf833-0c89-4319-90a1-d11e5e77b068 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 518.816711] env[63345]: DEBUG nova.compute.manager [None req-04fcf833-0c89-4319-90a1-d11e5e77b068 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] Allocating IP information in the background. 
{{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 518.818047] env[63345]: DEBUG nova.network.neutron [None req-04fcf833-0c89-4319-90a1-d11e5e77b068 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 518.951229] env[63345]: DEBUG nova.policy [None req-04fcf833-0c89-4319-90a1-d11e5e77b068 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a085cf3a30664abb8c5bd85164e0bfd1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c19245b0a3c4457c9e7674c2c6619a6b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 518.955995] env[63345]: DEBUG nova.compute.manager [req-21e941d6-7865-4b9a-b46e-f8634b922889 req-dcfd01a7-946c-4900-9668-7d8d89172263 service nova] [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] Received event network-changed-1bb7a991-b363-43c0-8650-31586ccda3de {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 518.956548] env[63345]: DEBUG nova.compute.manager [req-21e941d6-7865-4b9a-b46e-f8634b922889 req-dcfd01a7-946c-4900-9668-7d8d89172263 service nova] [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] Refreshing instance network info cache due to event network-changed-1bb7a991-b363-43c0-8650-31586ccda3de. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 518.956548] env[63345]: DEBUG oslo_concurrency.lockutils [req-21e941d6-7865-4b9a-b46e-f8634b922889 req-dcfd01a7-946c-4900-9668-7d8d89172263 service nova] Acquiring lock "refresh_cache-a6858a79-06b8-4110-9da4-e0e2a4a4e830" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 519.256782] env[63345]: DEBUG oslo_concurrency.lockutils [None req-358d58e2-d3f0-40f3-a40e-86b8333d3d19 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Releasing lock "refresh_cache-a6858a79-06b8-4110-9da4-e0e2a4a4e830" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 519.257226] env[63345]: DEBUG nova.compute.manager [None req-358d58e2-d3f0-40f3-a40e-86b8333d3d19 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] Start destroying the instance on the hypervisor. 
{{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 519.257409] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-358d58e2-d3f0-40f3-a40e-86b8333d3d19 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 519.257704] env[63345]: DEBUG oslo_concurrency.lockutils [req-21e941d6-7865-4b9a-b46e-f8634b922889 req-dcfd01a7-946c-4900-9668-7d8d89172263 service nova] Acquired lock "refresh_cache-a6858a79-06b8-4110-9da4-e0e2a4a4e830" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 519.257935] env[63345]: DEBUG nova.network.neutron [req-21e941d6-7865-4b9a-b46e-f8634b922889 req-dcfd01a7-946c-4900-9668-7d8d89172263 service nova] [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] Refreshing network info cache for port 1bb7a991-b363-43c0-8650-31586ccda3de {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 519.258959] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5de9c02a-ddcf-4d40-aff8-4837eb726789 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 519.284420] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-478b3921-a2bf-459d-a62e-45f89159d095 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 519.311758] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 519.312333] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] [instance: 51d6db80-9d1f-4e38-a564-f587474f6294] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 519.312572] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 519.312934] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 519.313476] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 
tempest-ServerDiagnosticsV248Test-1678394502-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 519.314371] env[63345]: WARNING nova.virt.vmwareapi.vmops [None req-358d58e2-d3f0-40f3-a40e-86b8333d3d19 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a6858a79-06b8-4110-9da4-e0e2a4a4e830 could not be found. [ 519.314570] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-358d58e2-d3f0-40f3-a40e-86b8333d3d19 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 519.314741] env[63345]: INFO nova.compute.manager [None req-358d58e2-d3f0-40f3-a40e-86b8333d3d19 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] Took 0.06 seconds to destroy the instance on the hypervisor. [ 519.314957] env[63345]: DEBUG oslo.service.loopingcall [None req-358d58e2-d3f0-40f3-a40e-86b8333d3d19 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 519.315174] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5696b3b4-8aaa-4742-a761-0dff55c447d8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 519.317092] env[63345]: DEBUG nova.compute.manager [-] [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 519.317473] env[63345]: DEBUG nova.network.neutron [-] [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 519.321560] env[63345]: DEBUG nova.compute.manager [None req-04fcf833-0c89-4319-90a1-d11e5e77b068 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] Start building block device mappings for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 519.342021] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 519.342021] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 519.342021] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55cff759-6490-463b-9c44-ec040a332d48 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 519.347697] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-30ce9fd2-090c-4b4c-8fc6-07dbf3e6c6c1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 519.357759] env[63345]: DEBUG oslo_vmware.api [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Waiting for the task: (returnval){ [ 519.357759] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]5256049e-5a47-9d76-51b2-a8fbfadf651e" [ 519.357759] env[63345]: _type = "Task" [ 519.357759] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 519.367232] env[63345]: DEBUG oslo_vmware.api [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5256049e-5a47-9d76-51b2-a8fbfadf651e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 519.428493] env[63345]: DEBUG nova.network.neutron [-] [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 519.464607] env[63345]: DEBUG nova.network.neutron [None req-04fcf833-0c89-4319-90a1-d11e5e77b068 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] Successfully created port: 80ef48d2-0611-4708-9b7a-7b609048be7e {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 519.523107] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a097932b-60f9-4384-b1fe-e2686fbffa04 tempest-ServersAdminNegativeTestJSON-1806261786 tempest-ServersAdminNegativeTestJSON-1806261786-project-member] Acquiring lock "cec6ec60-5e8a-4c31-ba75-001f3c1980f0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 519.524207] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a097932b-60f9-4384-b1fe-e2686fbffa04 tempest-ServersAdminNegativeTestJSON-1806261786 tempest-ServersAdminNegativeTestJSON-1806261786-project-member] Lock "cec6ec60-5e8a-4c31-ba75-001f3c1980f0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 519.527451] env[63345]: DEBUG oslo_concurrency.lockutils [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] Acquiring lock "1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 519.529190] env[63345]: DEBUG oslo_concurrency.lockutils [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] Lock "1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 519.555917] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fca5c6f-7f21-4426-b2df-500caef0b62f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 519.564848] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b96966a9-b503-4d4c-bbfc-2c5ad738192c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 519.603067] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a412ad99-8f18-4974-830a-efa9bf35d351 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 519.612561] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-111ec5f7-ff85-49f0-b297-c33675f35d66 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 519.630954] env[63345]: DEBUG nova.compute.provider_tree [None req-092e8cc7-bca1-43ff-be18-4b9d654c5cad tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 519.806504] env[63345]: DEBUG nova.network.neutron [req-21e941d6-7865-4b9a-b46e-f8634b922889 req-dcfd01a7-946c-4900-9668-7d8d89172263 service nova] [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 519.876851] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] [instance: 51d6db80-9d1f-4e38-a564-f587474f6294] Preparing fetch location {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 519.877106] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Creating directory with path [datastore2] vmware_temp/baaf54b3-c679-4e56-92e8-435dd9aee530/2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 519.877317] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2372de6e-8abd-4c88-ba53-c18c7ede41b0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 519.899934] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Created directory with path [datastore2] vmware_temp/baaf54b3-c679-4e56-92e8-435dd9aee530/2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 519.900156] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] [instance: 51d6db80-9d1f-4e38-a564-f587474f6294] Fetch image to [datastore2] vmware_temp/baaf54b3-c679-4e56-92e8-435dd9aee530/2ff49e1b-8f44-4332-bba9-777d55ff62c4/tmp-sparse.vmdk {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 519.900419] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] [instance: 51d6db80-9d1f-4e38-a564-f587474f6294] Downloading image file data 2ff49e1b-8f44-4332-bba9-777d55ff62c4 to [datastore2] vmware_temp/baaf54b3-c679-4e56-92e8-435dd9aee530/2ff49e1b-8f44-4332-bba9-777d55ff62c4/tmp-sparse.vmdk on the data store datastore2 {{(pid=63345) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 519.902887] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5d4ca2d-1fb3-4390-8370-a27ce87c0b86 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 519.911070] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8f200bd-5a67-4dd9-a096-84121649f4c6 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 519.925292] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0a14423-b0a6-4106-aab2-b5dda349d235 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 519.935129] env[63345]: DEBUG nova.network.neutron [-] [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] Updating instance_info_cache with network_info: 
[] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 519.967015] env[63345]: INFO nova.compute.manager [-] [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] Took 0.65 seconds to deallocate network for instance. [ 519.967774] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a98a4679-1833-424b-a5f4-e50d3b15033f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 519.972676] env[63345]: DEBUG nova.compute.claims [None req-358d58e2-d3f0-40f3-a40e-86b8333d3d19 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] Aborting claim: {{(pid=63345) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 519.972957] env[63345]: DEBUG oslo_concurrency.lockutils [None req-358d58e2-d3f0-40f3-a40e-86b8333d3d19 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 519.977240] env[63345]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-348c2924-b21e-499d-b4a2-5bf7d67d402d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 519.999960] env[63345]: DEBUG nova.virt.vmwareapi.images [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] [instance: 51d6db80-9d1f-4e38-a564-f587474f6294] Downloading image file data 2ff49e1b-8f44-4332-bba9-777d55ff62c4 to the data store datastore2 {{(pid=63345) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 520.029778] env[63345]: DEBUG nova.compute.manager [None req-a097932b-60f9-4384-b1fe-e2686fbffa04 tempest-ServersAdminNegativeTestJSON-1806261786 tempest-ServersAdminNegativeTestJSON-1806261786-project-member] [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 520.033136] env[63345]: DEBUG nova.compute.manager [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 520.075472] env[63345]: DEBUG nova.network.neutron [req-21e941d6-7865-4b9a-b46e-f8634b922889 req-dcfd01a7-946c-4900-9668-7d8d89172263 service nova] [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 520.088558] env[63345]: DEBUG oslo_vmware.rw_handles [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/baaf54b3-c679-4e56-92e8-435dd9aee530/2ff49e1b-8f44-4332-bba9-777d55ff62c4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=63345) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 520.150779] env[63345]: DEBUG nova.scheduler.client.report [None req-092e8cc7-bca1-43ff-be18-4b9d654c5cad tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 188, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 520.331954] env[63345]: DEBUG nova.compute.manager [None req-04fcf833-0c89-4319-90a1-d11e5e77b068 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] Start spawning the instance on the hypervisor. {{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 520.371111] env[63345]: DEBUG nova.virt.hardware [None req-04fcf833-0c89-4319-90a1-d11e5e77b068 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 520.371443] env[63345]: DEBUG nova.virt.hardware [None req-04fcf833-0c89-4319-90a1-d11e5e77b068 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 520.371697] env[63345]: DEBUG nova.virt.hardware [None req-04fcf833-0c89-4319-90a1-d11e5e77b068 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 520.371948] env[63345]: DEBUG nova.virt.hardware [None req-04fcf833-0c89-4319-90a1-d11e5e77b068 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 520.374570] env[63345]: DEBUG nova.virt.hardware [None req-04fcf833-0c89-4319-90a1-d11e5e77b068 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 520.375038] env[63345]: DEBUG nova.virt.hardware [None req-04fcf833-0c89-4319-90a1-d11e5e77b068 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 520.375108] env[63345]: DEBUG nova.virt.hardware [None req-04fcf833-0c89-4319-90a1-d11e5e77b068 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 520.375238] env[63345]: DEBUG nova.virt.hardware [None req-04fcf833-0c89-4319-90a1-d11e5e77b068 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 520.375423] env[63345]: DEBUG nova.virt.hardware [None req-04fcf833-0c89-4319-90a1-d11e5e77b068 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 520.375583] env[63345]: DEBUG nova.virt.hardware [None req-04fcf833-0c89-4319-90a1-d11e5e77b068 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 520.375750] env[63345]: DEBUG nova.virt.hardware [None req-04fcf833-0c89-4319-90a1-d11e5e77b068 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 520.376702] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61ca5b61-2154-4ab3-b644-952c62a9caf5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 520.395443] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad68c61f-a818-45c5-a2b5-2c2a61dce832 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 520.566861] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a097932b-60f9-4384-b1fe-e2686fbffa04 tempest-ServersAdminNegativeTestJSON-1806261786 tempest-ServersAdminNegativeTestJSON-1806261786-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 520.574842] env[63345]: DEBUG oslo_concurrency.lockutils [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 520.582749] env[63345]: DEBUG oslo_concurrency.lockutils [req-21e941d6-7865-4b9a-b46e-f8634b922889 req-dcfd01a7-946c-4900-9668-7d8d89172263 service nova] Releasing lock "refresh_cache-a6858a79-06b8-4110-9da4-e0e2a4a4e830" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 520.656747] env[63345]: DEBUG oslo_concurrency.lockutils [None req-092e8cc7-bca1-43ff-be18-4b9d654c5cad tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.354s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 520.657498] env[63345]: DEBUG nova.compute.manager [None req-092e8cc7-bca1-43ff-be18-4b9d654c5cad tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] Start building networks asynchronously for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 520.662068] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5ea571cd-8f6b-4257-8cc6-0ee9e2154c2b tempest-ServersTestFqdnHostnames-427992595 tempest-ServersTestFqdnHostnames-427992595-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.706s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 520.664031] env[63345]: INFO nova.compute.claims [None req-5ea571cd-8f6b-4257-8cc6-0ee9e2154c2b tempest-ServersTestFqdnHostnames-427992595 tempest-ServersTestFqdnHostnames-427992595-project-member] [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 520.819389] env[63345]: DEBUG oslo_vmware.rw_handles [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Completed reading data from the image iterator. {{(pid=63345) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 520.819650] env[63345]: DEBUG oslo_vmware.rw_handles [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/baaf54b3-c679-4e56-92e8-435dd9aee530/2ff49e1b-8f44-4332-bba9-777d55ff62c4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=63345) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 520.852657] env[63345]: ERROR nova.compute.manager [None req-04fcf833-0c89-4319-90a1-d11e5e77b068 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 80ef48d2-0611-4708-9b7a-7b609048be7e, please check neutron logs for more information. 
[ 520.852657] env[63345]: ERROR nova.compute.manager Traceback (most recent call last): [ 520.852657] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 520.852657] env[63345]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 520.852657] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 520.852657] env[63345]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 520.852657] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 520.852657] env[63345]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 520.852657] env[63345]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 520.852657] env[63345]: ERROR nova.compute.manager self.force_reraise() [ 520.852657] env[63345]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 520.852657] env[63345]: ERROR nova.compute.manager raise self.value [ 520.852657] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 520.852657] env[63345]: ERROR nova.compute.manager updated_port = self._update_port( [ 520.852657] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 520.852657] env[63345]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 520.853884] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 520.853884] env[63345]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 520.853884] env[63345]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 80ef48d2-0611-4708-9b7a-7b609048be7e, please check neutron logs for more information. 
[ 520.853884] env[63345]: ERROR nova.compute.manager [ 520.853884] env[63345]: Traceback (most recent call last): [ 520.853884] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 520.853884] env[63345]: listener.cb(fileno) [ 520.853884] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 520.853884] env[63345]: result = function(*args, **kwargs) [ 520.853884] env[63345]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 520.853884] env[63345]: return func(*args, **kwargs) [ 520.853884] env[63345]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 520.853884] env[63345]: raise e [ 520.853884] env[63345]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 520.853884] env[63345]: nwinfo = self.network_api.allocate_for_instance( [ 520.853884] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 520.853884] env[63345]: created_port_ids = self._update_ports_for_instance( [ 520.853884] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 520.853884] env[63345]: with excutils.save_and_reraise_exception(): [ 520.853884] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 520.853884] env[63345]: self.force_reraise() [ 520.853884] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 520.853884] env[63345]: raise self.value [ 520.853884] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 520.853884] env[63345]: updated_port = self._update_port( [ 520.853884] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 520.853884] env[63345]: _ensure_no_port_binding_failure(port) [ 520.853884] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 520.853884] env[63345]: raise exception.PortBindingFailed(port_id=port['id']) [ 520.854806] env[63345]: nova.exception.PortBindingFailed: Binding failed for port 80ef48d2-0611-4708-9b7a-7b609048be7e, please check neutron logs for more information. [ 520.854806] env[63345]: Removing descriptor: 16 [ 520.854806] env[63345]: ERROR nova.compute.manager [None req-04fcf833-0c89-4319-90a1-d11e5e77b068 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 80ef48d2-0611-4708-9b7a-7b609048be7e, please check neutron logs for more information. 
[ 520.854806] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] Traceback (most recent call last): [ 520.854806] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 520.854806] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] yield resources [ 520.854806] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 520.854806] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] self.driver.spawn(context, instance, image_meta, [ 520.854806] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 520.854806] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 520.854806] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 520.854806] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] vm_ref = self.build_virtual_machine(instance, [ 520.855287] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 520.855287] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] vif_infos = vmwarevif.get_vif_info(self._session, [ 520.855287] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 520.855287] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] for vif in network_info: [ 520.855287] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 520.855287] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] return self._sync_wrapper(fn, *args, **kwargs) [ 520.855287] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 520.855287] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] self.wait() [ 520.855287] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 520.855287] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] self[:] = self._gt.wait() [ 520.855287] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 520.855287] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] return self._exit_event.wait() [ 520.855287] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 520.856219] env[63345]: ERROR 
nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] result = hub.switch() [ 520.856219] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 520.856219] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] return self.greenlet.switch() [ 520.856219] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 520.856219] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] result = function(*args, **kwargs) [ 520.856219] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 520.856219] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] return func(*args, **kwargs) [ 520.856219] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 520.856219] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] raise e [ 520.856219] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 520.856219] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] nwinfo = self.network_api.allocate_for_instance( [ 520.856219] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 520.856219] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] created_port_ids = self._update_ports_for_instance( [ 520.857131] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 520.857131] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] with excutils.save_and_reraise_exception(): [ 520.857131] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 520.857131] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] self.force_reraise() [ 520.857131] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 520.857131] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] raise self.value [ 520.857131] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 520.857131] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] updated_port = self._update_port( [ 520.857131] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 520.857131] 
env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] _ensure_no_port_binding_failure(port) [ 520.857131] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 520.857131] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] raise exception.PortBindingFailed(port_id=port['id']) [ 520.857506] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] nova.exception.PortBindingFailed: Binding failed for port 80ef48d2-0611-4708-9b7a-7b609048be7e, please check neutron logs for more information. [ 520.857506] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] [ 520.857506] env[63345]: INFO nova.compute.manager [None req-04fcf833-0c89-4319-90a1-d11e5e77b068 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] Terminating instance [ 520.868267] env[63345]: DEBUG nova.virt.vmwareapi.images [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] [instance: 51d6db80-9d1f-4e38-a564-f587474f6294] Downloaded image file data 2ff49e1b-8f44-4332-bba9-777d55ff62c4 to vmware_temp/baaf54b3-c679-4e56-92e8-435dd9aee530/2ff49e1b-8f44-4332-bba9-777d55ff62c4/tmp-sparse.vmdk on the data store datastore2 {{(pid=63345) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 520.870466] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] [instance: 51d6db80-9d1f-4e38-a564-f587474f6294] Caching image {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 520.870466] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Copying Virtual Disk [datastore2] vmware_temp/baaf54b3-c679-4e56-92e8-435dd9aee530/2ff49e1b-8f44-4332-bba9-777d55ff62c4/tmp-sparse.vmdk to [datastore2] vmware_temp/baaf54b3-c679-4e56-92e8-435dd9aee530/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 520.871395] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f7ae1738-69e9-4a7f-bbc4-60787e44853b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 520.880365] env[63345]: DEBUG oslo_vmware.api [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Waiting for the task: (returnval){ [ 520.880365] env[63345]: value = "task-1016613" [ 520.880365] env[63345]: _type = "Task" [ 520.880365] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 520.890343] env[63345]: DEBUG oslo_vmware.api [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Task: {'id': task-1016613, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 521.169054] env[63345]: DEBUG nova.compute.utils [None req-092e8cc7-bca1-43ff-be18-4b9d654c5cad tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 521.175812] env[63345]: DEBUG nova.compute.manager [None req-092e8cc7-bca1-43ff-be18-4b9d654c5cad tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] Allocating IP information in the background. {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 521.175812] env[63345]: DEBUG nova.network.neutron [None req-092e8cc7-bca1-43ff-be18-4b9d654c5cad tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 521.236684] env[63345]: DEBUG nova.policy [None req-092e8cc7-bca1-43ff-be18-4b9d654c5cad tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '17d05413415247e784585aaa367481eb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '80677040e91647d9afae9c71c48ed3f0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 521.361137] env[63345]: DEBUG oslo_concurrency.lockutils [None req-04fcf833-0c89-4319-90a1-d11e5e77b068 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] Acquiring lock "refresh_cache-aa21e116-3bf1-4574-8d4f-d0a1af692e8b" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 521.361688] env[63345]: DEBUG oslo_concurrency.lockutils [None req-04fcf833-0c89-4319-90a1-d11e5e77b068 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] Acquired lock "refresh_cache-aa21e116-3bf1-4574-8d4f-d0a1af692e8b" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 521.361688] env[63345]: DEBUG nova.network.neutron [None req-04fcf833-0c89-4319-90a1-d11e5e77b068 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 521.391611] env[63345]: DEBUG oslo_vmware.api [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 
tempest-ServerDiagnosticsV248Test-1678394502-project-member] Task: {'id': task-1016613, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 521.567163] env[63345]: DEBUG nova.network.neutron [None req-092e8cc7-bca1-43ff-be18-4b9d654c5cad tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] Successfully created port: 7a36bb25-76ac-447e-a3ad-bb832183380e {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 521.674233] env[63345]: DEBUG nova.compute.manager [None req-092e8cc7-bca1-43ff-be18-4b9d654c5cad tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] Start building block device mappings for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 521.864506] env[63345]: DEBUG nova.compute.manager [req-c942ae61-7266-450c-b334-19bb9f52c867 req-35fa61ed-d4e1-4428-8cc7-705af5d425b3 service nova] [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] Received event network-vif-deleted-b116df82-1473-4a0c-9d2c-585e5c778551 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 521.890530] env[63345]: DEBUG nova.network.neutron [None req-04fcf833-0c89-4319-90a1-d11e5e77b068 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 521.896673] env[63345]: DEBUG oslo_vmware.api [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Task: {'id': task-1016613, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.677086} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 521.897767] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Copied Virtual Disk [datastore2] vmware_temp/baaf54b3-c679-4e56-92e8-435dd9aee530/2ff49e1b-8f44-4332-bba9-777d55ff62c4/tmp-sparse.vmdk to [datastore2] vmware_temp/baaf54b3-c679-4e56-92e8-435dd9aee530/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 521.897992] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Deleting the datastore file [datastore2] vmware_temp/baaf54b3-c679-4e56-92e8-435dd9aee530/2ff49e1b-8f44-4332-bba9-777d55ff62c4/tmp-sparse.vmdk {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 521.898730] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a84f14bc-bc1e-4588-be67-ce4cbe49dc5b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 521.901126] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d3027a4f-323b-4e36-8a98-68755c9be559 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 521.907711] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa386f7f-6466-4b66-9fe3-bf715ff2b6c4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 521.912073] env[63345]: DEBUG oslo_vmware.api [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Waiting for the task: (returnval){ [ 521.912073] env[63345]: value = "task-1016614" [ 521.912073] env[63345]: _type = "Task" [ 521.912073] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 521.946622] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c6e5c4c-d207-42a9-a608-40c6bb77be35 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 521.949209] env[63345]: DEBUG oslo_vmware.api [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Task: {'id': task-1016614, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 521.954574] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50bb99ff-b214-4e44-acf0-e30b8f0da1f4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 521.968214] env[63345]: DEBUG nova.compute.provider_tree [None req-5ea571cd-8f6b-4257-8cc6-0ee9e2154c2b tempest-ServersTestFqdnHostnames-427992595 tempest-ServersTestFqdnHostnames-427992595-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 522.014735] env[63345]: DEBUG nova.network.neutron [None req-04fcf833-0c89-4319-90a1-d11e5e77b068 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 522.242949] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8eb6a8e8-18a6-4bca-bb00-3bbb78f015ed tempest-FloatingIPsAssociationTestJSON-967064990 tempest-FloatingIPsAssociationTestJSON-967064990-project-member] Acquiring lock "64c4c933-2b89-409a-9b4c-eccc7f481b67" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 522.243245] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8eb6a8e8-18a6-4bca-bb00-3bbb78f015ed tempest-FloatingIPsAssociationTestJSON-967064990 tempest-FloatingIPsAssociationTestJSON-967064990-project-member] Lock "64c4c933-2b89-409a-9b4c-eccc7f481b67" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 522.443328] env[63345]: DEBUG oslo_vmware.api [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Task: {'id': task-1016614, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.026931} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 522.443963] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 522.443963] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Moving file from [datastore2] vmware_temp/baaf54b3-c679-4e56-92e8-435dd9aee530/2ff49e1b-8f44-4332-bba9-777d55ff62c4 to [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4. 
{{(pid=63345) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 522.444116] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-f3dbf6b4-a558-458c-8e6a-53ba9b84257a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 522.451266] env[63345]: DEBUG oslo_vmware.api [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Waiting for the task: (returnval){ [ 522.451266] env[63345]: value = "task-1016615" [ 522.451266] env[63345]: _type = "Task" [ 522.451266] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 522.460476] env[63345]: DEBUG oslo_vmware.api [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Task: {'id': task-1016615, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 522.474695] env[63345]: DEBUG nova.scheduler.client.report [None req-5ea571cd-8f6b-4257-8cc6-0ee9e2154c2b tempest-ServersTestFqdnHostnames-427992595 tempest-ServersTestFqdnHostnames-427992595-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 188, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 522.523317] env[63345]: DEBUG oslo_concurrency.lockutils [None req-04fcf833-0c89-4319-90a1-d11e5e77b068 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] Releasing lock "refresh_cache-aa21e116-3bf1-4574-8d4f-d0a1af692e8b" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 522.526511] env[63345]: DEBUG nova.compute.manager [None req-04fcf833-0c89-4319-90a1-d11e5e77b068 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] Start destroying the instance on the hypervisor. 
{{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 522.526511] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-04fcf833-0c89-4319-90a1-d11e5e77b068 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 522.526511] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-db4d9024-a94f-4dfa-bedb-e0c287d3c668 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 522.535871] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e06e664-cfa9-4bf9-9d06-492327a75ce4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 522.560795] env[63345]: WARNING nova.virt.vmwareapi.vmops [None req-04fcf833-0c89-4319-90a1-d11e5e77b068 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance aa21e116-3bf1-4574-8d4f-d0a1af692e8b could not be found. [ 522.561802] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-04fcf833-0c89-4319-90a1-d11e5e77b068 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 522.562049] env[63345]: INFO nova.compute.manager [None req-04fcf833-0c89-4319-90a1-d11e5e77b068 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] Took 0.04 seconds to destroy the instance on the hypervisor. [ 522.562313] env[63345]: DEBUG oslo.service.loopingcall [None req-04fcf833-0c89-4319-90a1-d11e5e77b068 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 522.562534] env[63345]: DEBUG nova.compute.manager [-] [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 522.562626] env[63345]: DEBUG nova.network.neutron [-] [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 522.584443] env[63345]: ERROR nova.compute.manager [None req-092e8cc7-bca1-43ff-be18-4b9d654c5cad tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 7a36bb25-76ac-447e-a3ad-bb832183380e, please check neutron logs for more information. 
[ 522.584443] env[63345]: ERROR nova.compute.manager Traceback (most recent call last): [ 522.584443] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 522.584443] env[63345]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 522.584443] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 522.584443] env[63345]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 522.584443] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 522.584443] env[63345]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 522.584443] env[63345]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 522.584443] env[63345]: ERROR nova.compute.manager self.force_reraise() [ 522.584443] env[63345]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 522.584443] env[63345]: ERROR nova.compute.manager raise self.value [ 522.584443] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 522.584443] env[63345]: ERROR nova.compute.manager updated_port = self._update_port( [ 522.584443] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 522.584443] env[63345]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 522.584908] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 522.584908] env[63345]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 522.584908] env[63345]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 7a36bb25-76ac-447e-a3ad-bb832183380e, please check neutron logs for more information. 
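The traceback above ends in _ensure_no_port_binding_failure (nova/network/neutron.py:294), and the second traceback that follows shows the same exception re-surfacing through the eventlet hub. A minimal sketch of the kind of check involved, assuming Neutron reports a failed binding through the port's binding:vif_type attribute (the helper and constant names below are illustrative, not the exact Nova source):

    # Illustrative sketch: after updating a port, Nova-style code inspects the
    # binding that Neutron returned; a vif_type of 'binding_failed' is turned
    # into PortBindingFailed, the exception seen in the traceback above.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs for "
                "more information." % port_id)
            self.port_id = port_id

    VIF_TYPE_BINDING_FAILED = 'binding_failed'  # assumed Neutron marker value

    def ensure_no_port_binding_failure(port):
        """Raise PortBindingFailed if Neutron could not bind the port."""
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

    try:
        ensure_no_port_binding_failure(
            {'id': '7a36bb25-76ac-447e-a3ad-bb832183380e',
             'binding:vif_type': VIF_TYPE_BINDING_FAILED})
    except PortBindingFailed as exc:
        print(exc)  # Binding failed for port 7a36bb25-..., please check neutron logs ...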
[ 522.584908] env[63345]: ERROR nova.compute.manager [ 522.584908] env[63345]: Traceback (most recent call last): [ 522.584908] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 522.584908] env[63345]: listener.cb(fileno) [ 522.584908] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 522.584908] env[63345]: result = function(*args, **kwargs) [ 522.584908] env[63345]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 522.584908] env[63345]: return func(*args, **kwargs) [ 522.584908] env[63345]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 522.584908] env[63345]: raise e [ 522.584908] env[63345]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 522.584908] env[63345]: nwinfo = self.network_api.allocate_for_instance( [ 522.584908] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 522.584908] env[63345]: created_port_ids = self._update_ports_for_instance( [ 522.584908] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 522.584908] env[63345]: with excutils.save_and_reraise_exception(): [ 522.584908] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 522.584908] env[63345]: self.force_reraise() [ 522.584908] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 522.584908] env[63345]: raise self.value [ 522.584908] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 522.584908] env[63345]: updated_port = self._update_port( [ 522.584908] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 522.584908] env[63345]: _ensure_no_port_binding_failure(port) [ 522.584908] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 522.584908] env[63345]: raise exception.PortBindingFailed(port_id=port['id']) [ 522.586984] env[63345]: nova.exception.PortBindingFailed: Binding failed for port 7a36bb25-76ac-447e-a3ad-bb832183380e, please check neutron logs for more information. [ 522.586984] env[63345]: Removing descriptor: 16 [ 522.588858] env[63345]: DEBUG nova.network.neutron [-] [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 522.671153] env[63345]: DEBUG nova.compute.manager [req-0768b810-dfa5-45e4-81e9-cda8b7af6e8c req-1b19eba8-6a78-46a2-81e2-9148a7c2d996 service nova] [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] Received event network-vif-deleted-1bb7a991-b363-43c0-8650-31586ccda3de {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 522.671350] env[63345]: DEBUG nova.compute.manager [req-0768b810-dfa5-45e4-81e9-cda8b7af6e8c req-1b19eba8-6a78-46a2-81e2-9148a7c2d996 service nova] [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] Received event network-changed-80ef48d2-0611-4708-9b7a-7b609048be7e {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 522.671503] env[63345]: DEBUG nova.compute.manager [req-0768b810-dfa5-45e4-81e9-cda8b7af6e8c req-1b19eba8-6a78-46a2-81e2-9148a7c2d996 service nova] [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] Refreshing instance network info cache due to event network-changed-80ef48d2-0611-4708-9b7a-7b609048be7e. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 522.672372] env[63345]: DEBUG oslo_concurrency.lockutils [req-0768b810-dfa5-45e4-81e9-cda8b7af6e8c req-1b19eba8-6a78-46a2-81e2-9148a7c2d996 service nova] Acquiring lock "refresh_cache-aa21e116-3bf1-4574-8d4f-d0a1af692e8b" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 522.672372] env[63345]: DEBUG oslo_concurrency.lockutils [req-0768b810-dfa5-45e4-81e9-cda8b7af6e8c req-1b19eba8-6a78-46a2-81e2-9148a7c2d996 service nova] Acquired lock "refresh_cache-aa21e116-3bf1-4574-8d4f-d0a1af692e8b" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 522.672372] env[63345]: DEBUG nova.network.neutron [req-0768b810-dfa5-45e4-81e9-cda8b7af6e8c req-1b19eba8-6a78-46a2-81e2-9148a7c2d996 service nova] [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] Refreshing network info cache for port 80ef48d2-0611-4708-9b7a-7b609048be7e {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 522.686693] env[63345]: DEBUG nova.compute.manager [None req-092e8cc7-bca1-43ff-be18-4b9d654c5cad tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] Start spawning the instance on the hypervisor. 
{{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 522.711700] env[63345]: DEBUG nova.virt.hardware [None req-092e8cc7-bca1-43ff-be18-4b9d654c5cad tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 522.712459] env[63345]: DEBUG nova.virt.hardware [None req-092e8cc7-bca1-43ff-be18-4b9d654c5cad tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 522.712459] env[63345]: DEBUG nova.virt.hardware [None req-092e8cc7-bca1-43ff-be18-4b9d654c5cad tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 522.712459] env[63345]: DEBUG nova.virt.hardware [None req-092e8cc7-bca1-43ff-be18-4b9d654c5cad tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 522.712459] env[63345]: DEBUG nova.virt.hardware [None req-092e8cc7-bca1-43ff-be18-4b9d654c5cad tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 522.712590] env[63345]: DEBUG nova.virt.hardware [None req-092e8cc7-bca1-43ff-be18-4b9d654c5cad tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 522.712800] env[63345]: DEBUG nova.virt.hardware [None req-092e8cc7-bca1-43ff-be18-4b9d654c5cad tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 522.712925] env[63345]: DEBUG nova.virt.hardware [None req-092e8cc7-bca1-43ff-be18-4b9d654c5cad tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 522.713123] env[63345]: DEBUG nova.virt.hardware [None req-092e8cc7-bca1-43ff-be18-4b9d654c5cad 
tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 522.713289] env[63345]: DEBUG nova.virt.hardware [None req-092e8cc7-bca1-43ff-be18-4b9d654c5cad tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 522.713461] env[63345]: DEBUG nova.virt.hardware [None req-092e8cc7-bca1-43ff-be18-4b9d654c5cad tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 522.714372] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45ae8c8a-c098-4432-b82b-504d73c4feed {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 522.724898] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-421bf1ee-168e-4680-8849-7f5afb48310c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 522.741183] env[63345]: ERROR nova.compute.manager [None req-092e8cc7-bca1-43ff-be18-4b9d654c5cad tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 7a36bb25-76ac-447e-a3ad-bb832183380e, please check neutron logs for more information. 
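This spawn failure is the background port-allocation error re-raised in the foreground; the full traceback follows immediately below. The driver only hits it when it first iterates network_info, because that object wraps an eventlet GreenThread and GreenThread.wait() re-raises whatever the allocation function raised. A standalone sketch of that pattern only (simplified stand-ins, not Nova code):

    # Sketch of the eventlet pattern: a failure raised in the background green
    # thread is re-raised to the caller by GreenThread.wait(), which is why
    # PortBindingFailed from _allocate_network_async appears inside spawn.
    import eventlet

    class PortBindingFailed(Exception):
        pass

    def allocate_network_async(port_id):
        # Stand-in for the real allocation path, failing the way the log shows.
        raise PortBindingFailed(
            "Binding failed for port %s, please check neutron logs for "
            "more information." % port_id)

    gt = eventlet.spawn(allocate_network_async,
                        '7a36bb25-76ac-447e-a3ad-bb832183380e')

    try:
        network_info = gt.wait()  # equivalent of network_info's _sync_wrapper -> wait()
    except PortBindingFailed as exc:
        print("spawn sees the allocation failure:", exc)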
[ 522.741183] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] Traceback (most recent call last): [ 522.741183] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 522.741183] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] yield resources [ 522.741183] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 522.741183] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] self.driver.spawn(context, instance, image_meta, [ 522.741183] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 522.741183] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] self._vmops.spawn(context, instance, image_meta, injected_files, [ 522.741183] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 522.741183] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] vm_ref = self.build_virtual_machine(instance, [ 522.741183] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 522.741530] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] vif_infos = vmwarevif.get_vif_info(self._session, [ 522.741530] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 522.741530] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] for vif in network_info: [ 522.741530] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 522.741530] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] return self._sync_wrapper(fn, *args, **kwargs) [ 522.741530] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 522.741530] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] self.wait() [ 522.741530] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 522.741530] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] self[:] = self._gt.wait() [ 522.741530] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 522.741530] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] return self._exit_event.wait() [ 522.741530] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 522.741530] env[63345]: ERROR 
nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] current.throw(*self._exc) [ 522.741963] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 522.741963] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] result = function(*args, **kwargs) [ 522.741963] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 522.741963] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] return func(*args, **kwargs) [ 522.741963] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 522.741963] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] raise e [ 522.741963] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 522.741963] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] nwinfo = self.network_api.allocate_for_instance( [ 522.741963] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 522.741963] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] created_port_ids = self._update_ports_for_instance( [ 522.741963] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 522.741963] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] with excutils.save_and_reraise_exception(): [ 522.741963] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 522.742660] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] self.force_reraise() [ 522.742660] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 522.742660] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] raise self.value [ 522.742660] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 522.742660] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] updated_port = self._update_port( [ 522.742660] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 522.742660] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] _ensure_no_port_binding_failure(port) [ 522.742660] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
522.742660] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] raise exception.PortBindingFailed(port_id=port['id']) [ 522.742660] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] nova.exception.PortBindingFailed: Binding failed for port 7a36bb25-76ac-447e-a3ad-bb832183380e, please check neutron logs for more information. [ 522.742660] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] [ 522.742660] env[63345]: INFO nova.compute.manager [None req-092e8cc7-bca1-43ff-be18-4b9d654c5cad tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] Terminating instance [ 522.746998] env[63345]: DEBUG nova.compute.manager [None req-8eb6a8e8-18a6-4bca-bb00-3bbb78f015ed tempest-FloatingIPsAssociationTestJSON-967064990 tempest-FloatingIPsAssociationTestJSON-967064990-project-member] [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 522.965294] env[63345]: DEBUG oslo_vmware.api [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Task: {'id': task-1016615, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.023306} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 522.965826] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] File moved {{(pid=63345) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 522.966711] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] [instance: 51d6db80-9d1f-4e38-a564-f587474f6294] Cleaning up location [datastore2] vmware_temp/baaf54b3-c679-4e56-92e8-435dd9aee530 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 522.967392] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Deleting the datastore file [datastore2] vmware_temp/baaf54b3-c679-4e56-92e8-435dd9aee530 {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 522.967755] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0a043fd9-093c-4f23-a181-4dc294d2b982 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 522.975107] env[63345]: DEBUG oslo_vmware.api [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Waiting for the task: (returnval){ [ 522.975107] env[63345]: value = "task-1016616" [ 522.975107] env[63345]: _type = "Task" [ 522.975107] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 522.980318] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5ea571cd-8f6b-4257-8cc6-0ee9e2154c2b tempest-ServersTestFqdnHostnames-427992595 tempest-ServersTestFqdnHostnames-427992595-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.318s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 522.980866] env[63345]: DEBUG nova.compute.manager [None req-5ea571cd-8f6b-4257-8cc6-0ee9e2154c2b tempest-ServersTestFqdnHostnames-427992595 tempest-ServersTestFqdnHostnames-427992595-project-member] [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] Start building networks asynchronously for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 522.985800] env[63345]: DEBUG oslo_concurrency.lockutils [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.655s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 522.987842] env[63345]: INFO nova.compute.claims [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 522.996512] env[63345]: DEBUG oslo_vmware.api [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Task: {'id': task-1016616, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 523.088838] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 523.089363] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 523.089725] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Starting heal instance info cache {{(pid=63345) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10257}} [ 523.089934] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Rebuilding the list of instances to heal {{(pid=63345) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10261}} [ 523.094740] env[63345]: DEBUG nova.network.neutron [-] [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 523.197904] env[63345]: DEBUG nova.network.neutron [req-0768b810-dfa5-45e4-81e9-cda8b7af6e8c req-1b19eba8-6a78-46a2-81e2-9148a7c2d996 service nova] [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 523.249887] env[63345]: DEBUG oslo_concurrency.lockutils [None req-092e8cc7-bca1-43ff-be18-4b9d654c5cad tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Acquiring lock "refresh_cache-f4e897ce-2df5-40ae-99a8-11cac4902588" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 523.250623] env[63345]: DEBUG oslo_concurrency.lockutils [None req-092e8cc7-bca1-43ff-be18-4b9d654c5cad tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Acquired lock "refresh_cache-f4e897ce-2df5-40ae-99a8-11cac4902588" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 523.251135] env[63345]: DEBUG nova.network.neutron [None req-092e8cc7-bca1-43ff-be18-4b9d654c5cad tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 523.264977] env[63345]: DEBUG nova.network.neutron [req-0768b810-dfa5-45e4-81e9-cda8b7af6e8c req-1b19eba8-6a78-46a2-81e2-9148a7c2d996 service nova] [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 523.285715] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8eb6a8e8-18a6-4bca-bb00-3bbb78f015ed tempest-FloatingIPsAssociationTestJSON-967064990 tempest-FloatingIPsAssociationTestJSON-967064990-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 523.489883] env[63345]: DEBUG nova.compute.utils [None req-5ea571cd-8f6b-4257-8cc6-0ee9e2154c2b tempest-ServersTestFqdnHostnames-427992595 tempest-ServersTestFqdnHostnames-427992595-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 523.489883] env[63345]: DEBUG oslo_vmware.api [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Task: {'id': task-1016616, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.025121} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 523.489883] env[63345]: DEBUG nova.compute.manager [None req-5ea571cd-8f6b-4257-8cc6-0ee9e2154c2b tempest-ServersTestFqdnHostnames-427992595 tempest-ServersTestFqdnHostnames-427992595-project-member] [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] Allocating IP information in the background. {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 523.489883] env[63345]: DEBUG nova.network.neutron [None req-5ea571cd-8f6b-4257-8cc6-0ee9e2154c2b tempest-ServersTestFqdnHostnames-427992595 tempest-ServersTestFqdnHostnames-427992595-project-member] [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 523.492120] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 523.497032] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ef5755df-dc9e-4147-aec8-48156060147b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.506779] env[63345]: DEBUG oslo_vmware.api [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Waiting for the task: (returnval){ [ 523.506779] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52c25601-2e04-5843-8de6-515846955cad" [ 523.506779] env[63345]: _type = "Task" [ 523.506779] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 523.517501] env[63345]: DEBUG oslo_vmware.api [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52c25601-2e04-5843-8de6-515846955cad, 'name': SearchDatastore_Task, 'duration_secs': 0.008563} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 523.517501] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 523.517501] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 51d6db80-9d1f-4e38-a564-f587474f6294/51d6db80-9d1f-4e38-a564-f587474f6294.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 523.517979] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e41e8d06-5a10-4812-8aef-a0b233a3cd40 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.526571] env[63345]: DEBUG oslo_vmware.api [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Waiting for the task: (returnval){ [ 523.526571] env[63345]: value = "task-1016617" [ 523.526571] env[63345]: _type = "Task" [ 523.526571] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 523.538527] env[63345]: DEBUG oslo_vmware.api [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Task: {'id': task-1016617, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 523.572633] env[63345]: DEBUG nova.policy [None req-5ea571cd-8f6b-4257-8cc6-0ee9e2154c2b tempest-ServersTestFqdnHostnames-427992595 tempest-ServersTestFqdnHostnames-427992595-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '79fbb393d434455ab2ac4818f328e3f9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7cfa48245cc240a7a6f586298c78cc64', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 523.595407] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] Skipping network cache update for instance because it is Building. {{(pid=63345) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10270}} [ 523.595599] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] Skipping network cache update for instance because it is Building. 
{{(pid=63345) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10270}} [ 523.595796] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 51d6db80-9d1f-4e38-a564-f587474f6294] Skipping network cache update for instance because it is Building. {{(pid=63345) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10270}} [ 523.595884] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] Skipping network cache update for instance because it is Building. {{(pid=63345) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10270}} [ 523.595975] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] Skipping network cache update for instance because it is Building. {{(pid=63345) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10270}} [ 523.596122] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] Skipping network cache update for instance because it is Building. {{(pid=63345) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10270}} [ 523.596248] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Skipping network cache update for instance because it is Building. {{(pid=63345) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10270}} [ 523.596379] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Didn't find any instances for network info cache update. {{(pid=63345) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10343}} [ 523.596722] env[63345]: INFO nova.compute.manager [-] [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] Took 1.03 seconds to deallocate network for instance. 
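The run of _heal_instance_info_cache messages above shows the periodic task skipping every instance because each is still building, then giving up with "Didn't find any instances for network info cache update." A small sketch of that filtering step, as an assumed simplification of the periodic task rather than the exact Nova source:

    # Assumed simplification: instances still in the BUILDING state are skipped
    # by the info-cache healing pass; if nothing is left, the task does no work.
    BUILDING = 'building'

    def pick_instances_to_heal(instances, log):
        to_heal = []
        for inst in instances:
            if inst['vm_state'] == BUILDING:
                log(f"[instance: {inst['uuid']}] Skipping network cache update "
                    "for instance because it is Building.")
                continue
            to_heal.append(inst)
        if not to_heal:
            log("Didn't find any instances for network info cache update.")
        return to_heal

    # Mirrors the log above: every instance is still building, so none are healed.
    pick_instances_to_heal(
        [{'uuid': 'aa21e116-3bf1-4574-8d4f-d0a1af692e8b', 'vm_state': BUILDING},
         {'uuid': 'f4e897ce-2df5-40ae-99a8-11cac4902588', 'vm_state': BUILDING}],
        log=print)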
[ 523.597377] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 523.599485] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 523.599689] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 523.599895] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 523.600249] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 523.600519] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 523.601420] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63345) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10876}} [ 523.601420] env[63345]: DEBUG nova.compute.claims [None req-04fcf833-0c89-4319-90a1-d11e5e77b068 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] Aborting claim: {{(pid=63345) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 523.601534] env[63345]: DEBUG oslo_concurrency.lockutils [None req-04fcf833-0c89-4319-90a1-d11e5e77b068 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 523.601862] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager.update_available_resource {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 523.769766] env[63345]: DEBUG oslo_concurrency.lockutils [req-0768b810-dfa5-45e4-81e9-cda8b7af6e8c req-1b19eba8-6a78-46a2-81e2-9148a7c2d996 service nova] Releasing lock "refresh_cache-aa21e116-3bf1-4574-8d4f-d0a1af692e8b" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 523.769766] env[63345]: DEBUG nova.compute.manager [req-0768b810-dfa5-45e4-81e9-cda8b7af6e8c req-1b19eba8-6a78-46a2-81e2-9148a7c2d996 service nova] [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] Received event network-vif-deleted-80ef48d2-0611-4708-9b7a-7b609048be7e {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 523.779988] env[63345]: DEBUG nova.network.neutron [None req-092e8cc7-bca1-43ff-be18-4b9d654c5cad tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 523.874774] env[63345]: DEBUG nova.network.neutron [None req-092e8cc7-bca1-43ff-be18-4b9d654c5cad tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 523.994272] env[63345]: DEBUG nova.compute.manager [None req-5ea571cd-8f6b-4257-8cc6-0ee9e2154c2b tempest-ServersTestFqdnHostnames-427992595 tempest-ServersTestFqdnHostnames-427992595-project-member] [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] Start building block device mappings for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 524.038833] env[63345]: DEBUG oslo_vmware.api [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Task: {'id': task-1016617, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.491843} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 524.039147] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 51d6db80-9d1f-4e38-a564-f587474f6294/51d6db80-9d1f-4e38-a564-f587474f6294.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 524.039371] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] [instance: 51d6db80-9d1f-4e38-a564-f587474f6294] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 524.039621] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fe01a798-71f5-480e-8cdb-b560cc54c7a8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.054294] env[63345]: DEBUG oslo_vmware.api [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Waiting for the task: (returnval){ [ 524.054294] env[63345]: value = "task-1016618" [ 524.054294] env[63345]: _type = "Task" [ 524.054294] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 524.067600] env[63345]: DEBUG oslo_vmware.api [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Task: {'id': task-1016618, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 524.105237] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 524.192814] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2c10877-e6a0-4add-9f0a-c67bf6f88aba {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.203387] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7d8bb1c-5e8d-47b1-b9e8-98f5548723d7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.234332] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1fc7310-c41e-49f3-a6ac-17b88a7f927e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.243737] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba3bc622-0369-4b2a-b2ff-89ec51e6f2fe {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.246504] env[63345]: DEBUG nova.network.neutron [None req-5ea571cd-8f6b-4257-8cc6-0ee9e2154c2b tempest-ServersTestFqdnHostnames-427992595 tempest-ServersTestFqdnHostnames-427992595-project-member] [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] Successfully created port: e75e1c47-4c22-4d2c-8365-bf365e8ee881 {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 524.260758] env[63345]: DEBUG nova.compute.provider_tree [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 524.379211] env[63345]: DEBUG oslo_concurrency.lockutils [None req-092e8cc7-bca1-43ff-be18-4b9d654c5cad tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Releasing lock "refresh_cache-f4e897ce-2df5-40ae-99a8-11cac4902588" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 524.379680] env[63345]: DEBUG nova.compute.manager [None req-092e8cc7-bca1-43ff-be18-4b9d654c5cad tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] Start destroying the instance on the hypervisor. 
{{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 524.381104] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-092e8cc7-bca1-43ff-be18-4b9d654c5cad tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 524.381436] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b969eba4-314f-4788-9460-5c11936e34d3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.392213] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c47a3695-2775-469c-8595-d40497a20902 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.419634] env[63345]: WARNING nova.virt.vmwareapi.vmops [None req-092e8cc7-bca1-43ff-be18-4b9d654c5cad tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f4e897ce-2df5-40ae-99a8-11cac4902588 could not be found. [ 524.419831] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-092e8cc7-bca1-43ff-be18-4b9d654c5cad tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 524.419954] env[63345]: INFO nova.compute.manager [None req-092e8cc7-bca1-43ff-be18-4b9d654c5cad tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] Took 0.04 seconds to destroy the instance on the hypervisor. [ 524.420258] env[63345]: DEBUG oslo.service.loopingcall [None req-092e8cc7-bca1-43ff-be18-4b9d654c5cad tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 524.420424] env[63345]: DEBUG nova.compute.manager [-] [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 524.420507] env[63345]: DEBUG nova.network.neutron [-] [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 524.466195] env[63345]: DEBUG nova.network.neutron [-] [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 524.567850] env[63345]: DEBUG oslo_vmware.api [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Task: {'id': task-1016618, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.091802} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 524.568356] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] [instance: 51d6db80-9d1f-4e38-a564-f587474f6294] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 524.569501] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-212d0575-41d1-4251-a2e5-2f675cf6d4d2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.595930] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] [instance: 51d6db80-9d1f-4e38-a564-f587474f6294] Reconfiguring VM instance instance-00000003 to attach disk [datastore2] 51d6db80-9d1f-4e38-a564-f587474f6294/51d6db80-9d1f-4e38-a564-f587474f6294.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 524.596264] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4f185edf-4485-4e09-86c8-a9aa46065513 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.624272] env[63345]: DEBUG oslo_vmware.api [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Waiting for the task: (returnval){ [ 524.624272] env[63345]: value = "task-1016619" [ 524.624272] env[63345]: _type = "Task" [ 524.624272] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 524.633488] env[63345]: DEBUG oslo_vmware.api [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Task: {'id': task-1016619, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 524.791104] env[63345]: ERROR nova.scheduler.client.report [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] [req-f6fa3197-0726-4ecb-9616-ec7595c65b4b] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID fc35ddde-c15e-4ab8-bf77-a06ae0805b57. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-f6fa3197-0726-4ecb-9616-ec7595c65b4b"}]} [ 524.811163] env[63345]: DEBUG nova.scheduler.client.report [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Refreshing inventories for resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 524.827709] env[63345]: DEBUG nova.scheduler.client.report [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Updating ProviderTree inventory for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 188, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 524.828039] env[63345]: DEBUG nova.compute.provider_tree [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 188, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 524.842168] env[63345]: DEBUG nova.scheduler.client.report [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Refreshing aggregate associations for resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57, aggregates: None {{(pid=63345) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 524.867848] env[63345]: DEBUG nova.scheduler.client.report [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Refreshing trait associations for resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=63345) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 524.970142] env[63345]: DEBUG nova.network.neutron [-] [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 525.011586] env[63345]: DEBUG nova.compute.manager [None req-5ea571cd-8f6b-4257-8cc6-0ee9e2154c2b tempest-ServersTestFqdnHostnames-427992595 tempest-ServersTestFqdnHostnames-427992595-project-member] [instance: 
c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] Start spawning the instance on the hypervisor. {{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 525.041390] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5396d606-95c4-4fd0-a8ea-ee5d689132b7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.048116] env[63345]: DEBUG nova.virt.hardware [None req-5ea571cd-8f6b-4257-8cc6-0ee9e2154c2b tempest-ServersTestFqdnHostnames-427992595 tempest-ServersTestFqdnHostnames-427992595-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 525.048116] env[63345]: DEBUG nova.virt.hardware [None req-5ea571cd-8f6b-4257-8cc6-0ee9e2154c2b tempest-ServersTestFqdnHostnames-427992595 tempest-ServersTestFqdnHostnames-427992595-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 525.048116] env[63345]: DEBUG nova.virt.hardware [None req-5ea571cd-8f6b-4257-8cc6-0ee9e2154c2b tempest-ServersTestFqdnHostnames-427992595 tempest-ServersTestFqdnHostnames-427992595-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 525.048339] env[63345]: DEBUG nova.virt.hardware [None req-5ea571cd-8f6b-4257-8cc6-0ee9e2154c2b tempest-ServersTestFqdnHostnames-427992595 tempest-ServersTestFqdnHostnames-427992595-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 525.048339] env[63345]: DEBUG nova.virt.hardware [None req-5ea571cd-8f6b-4257-8cc6-0ee9e2154c2b tempest-ServersTestFqdnHostnames-427992595 tempest-ServersTestFqdnHostnames-427992595-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 525.048339] env[63345]: DEBUG nova.virt.hardware [None req-5ea571cd-8f6b-4257-8cc6-0ee9e2154c2b tempest-ServersTestFqdnHostnames-427992595 tempest-ServersTestFqdnHostnames-427992595-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 525.048459] env[63345]: DEBUG nova.virt.hardware [None req-5ea571cd-8f6b-4257-8cc6-0ee9e2154c2b tempest-ServersTestFqdnHostnames-427992595 tempest-ServersTestFqdnHostnames-427992595-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 
525.048769] env[63345]: DEBUG nova.virt.hardware [None req-5ea571cd-8f6b-4257-8cc6-0ee9e2154c2b tempest-ServersTestFqdnHostnames-427992595 tempest-ServersTestFqdnHostnames-427992595-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 525.048769] env[63345]: DEBUG nova.virt.hardware [None req-5ea571cd-8f6b-4257-8cc6-0ee9e2154c2b tempest-ServersTestFqdnHostnames-427992595 tempest-ServersTestFqdnHostnames-427992595-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 525.048916] env[63345]: DEBUG nova.virt.hardware [None req-5ea571cd-8f6b-4257-8cc6-0ee9e2154c2b tempest-ServersTestFqdnHostnames-427992595 tempest-ServersTestFqdnHostnames-427992595-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 525.049105] env[63345]: DEBUG nova.virt.hardware [None req-5ea571cd-8f6b-4257-8cc6-0ee9e2154c2b tempest-ServersTestFqdnHostnames-427992595 tempest-ServersTestFqdnHostnames-427992595-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 525.050268] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02b2254a-a89f-4d61-81a5-3841a76136dc {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.058026] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f65d889b-49ce-44d6-9fd2-72ffd470de6a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.066302] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-503e0b22-0348-4289-a8b3-214057368b89 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.107231] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd9975d8-8dc4-4ec7-a81a-88932d17af20 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.126081] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06decfb9-7faa-43aa-a953-cd33dd37a64a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.148158] env[63345]: DEBUG nova.compute.provider_tree [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 525.151206] env[63345]: DEBUG oslo_vmware.api [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 
tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Task: {'id': task-1016619, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 525.472858] env[63345]: INFO nova.compute.manager [-] [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] Took 1.05 seconds to deallocate network for instance. [ 525.475815] env[63345]: DEBUG nova.compute.claims [None req-092e8cc7-bca1-43ff-be18-4b9d654c5cad tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] Aborting claim: {{(pid=63345) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 525.476236] env[63345]: DEBUG oslo_concurrency.lockutils [None req-092e8cc7-bca1-43ff-be18-4b9d654c5cad tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 525.646405] env[63345]: DEBUG oslo_vmware.api [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Task: {'id': task-1016619, 'name': ReconfigVM_Task, 'duration_secs': 0.877996} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 525.647338] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] [instance: 51d6db80-9d1f-4e38-a564-f587474f6294] Reconfigured VM instance instance-00000003 to attach disk [datastore2] 51d6db80-9d1f-4e38-a564-f587474f6294/51d6db80-9d1f-4e38-a564-f587474f6294.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 525.648750] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-12e4fb64-886f-42f7-b9ac-b39e3f2d61b5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.658294] env[63345]: DEBUG oslo_vmware.api [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Waiting for the task: (returnval){ [ 525.658294] env[63345]: value = "task-1016620" [ 525.658294] env[63345]: _type = "Task" [ 525.658294] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 525.673271] env[63345]: DEBUG oslo_vmware.api [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Task: {'id': task-1016620, 'name': Rename_Task} progress is 6%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 525.678643] env[63345]: ERROR nova.scheduler.client.report [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] [req-144b63f3-7ea4-4075-ba9a-baa97290b9da] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID fc35ddde-c15e-4ab8-bf77-a06ae0805b57. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-144b63f3-7ea4-4075-ba9a-baa97290b9da"}]} [ 525.703203] env[63345]: DEBUG nova.scheduler.client.report [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Refreshing inventories for resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 525.732437] env[63345]: DEBUG nova.scheduler.client.report [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Updating ProviderTree inventory for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 188, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 525.732437] env[63345]: DEBUG nova.compute.provider_tree [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 188, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 525.754920] env[63345]: DEBUG nova.scheduler.client.report [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Refreshing aggregate associations for resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57, aggregates: None {{(pid=63345) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 525.780474] env[63345]: DEBUG nova.scheduler.client.report [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] 
Refreshing trait associations for resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=63345) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 525.811164] env[63345]: DEBUG nova.compute.manager [req-8d37903e-c63b-4305-9be3-fbe0a708fb36 req-82707745-4385-4b65-83bb-f794055b2c8d service nova] [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] Received event network-changed-7a36bb25-76ac-447e-a3ad-bb832183380e {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 525.812013] env[63345]: DEBUG nova.compute.manager [req-8d37903e-c63b-4305-9be3-fbe0a708fb36 req-82707745-4385-4b65-83bb-f794055b2c8d service nova] [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] Refreshing instance network info cache due to event network-changed-7a36bb25-76ac-447e-a3ad-bb832183380e. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 525.812404] env[63345]: DEBUG oslo_concurrency.lockutils [req-8d37903e-c63b-4305-9be3-fbe0a708fb36 req-82707745-4385-4b65-83bb-f794055b2c8d service nova] Acquiring lock "refresh_cache-f4e897ce-2df5-40ae-99a8-11cac4902588" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 525.812850] env[63345]: DEBUG oslo_concurrency.lockutils [req-8d37903e-c63b-4305-9be3-fbe0a708fb36 req-82707745-4385-4b65-83bb-f794055b2c8d service nova] Acquired lock "refresh_cache-f4e897ce-2df5-40ae-99a8-11cac4902588" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 525.813394] env[63345]: DEBUG nova.network.neutron [req-8d37903e-c63b-4305-9be3-fbe0a708fb36 req-82707745-4385-4b65-83bb-f794055b2c8d service nova] [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] Refreshing network info cache for port 7a36bb25-76ac-447e-a3ad-bb832183380e {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 525.821880] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fed8ccca-f158-4dc4-bc3d-351314b383ad tempest-VolumesAssistedSnapshotsTest-639887845 tempest-VolumesAssistedSnapshotsTest-639887845-project-member] Acquiring lock "e525b0c2-55f9-43f2-9d4f-faf46c0cd559" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 525.822267] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fed8ccca-f158-4dc4-bc3d-351314b383ad tempest-VolumesAssistedSnapshotsTest-639887845 tempest-VolumesAssistedSnapshotsTest-639887845-project-member] Lock "e525b0c2-55f9-43f2-9d4f-faf46c0cd559" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 525.976537] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98d775a7-164a-4688-8337-71bfea820d1c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.986493] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f62697ea-f108-4f32-b23d-befe5ce8b37b {{(pid=63345) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 526.028524] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b66232c7-3adc-4785-978e-80063404df53 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 526.035935] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e14c979c-872d-49f9-805b-8aae4307deb5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 526.049727] env[63345]: DEBUG nova.compute.provider_tree [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 526.170123] env[63345]: DEBUG oslo_vmware.api [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Task: {'id': task-1016620, 'name': Rename_Task, 'duration_secs': 0.145077} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 526.170421] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] [instance: 51d6db80-9d1f-4e38-a564-f587474f6294] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 526.170666] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8d6967f4-97d1-4150-a522-446587156768 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 526.177525] env[63345]: DEBUG oslo_vmware.api [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Waiting for the task: (returnval){ [ 526.177525] env[63345]: value = "task-1016621" [ 526.177525] env[63345]: _type = "Task" [ 526.177525] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 526.184781] env[63345]: DEBUG oslo_vmware.api [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Task: {'id': task-1016621, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 526.341553] env[63345]: DEBUG nova.network.neutron [req-8d37903e-c63b-4305-9be3-fbe0a708fb36 req-82707745-4385-4b65-83bb-f794055b2c8d service nova] [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 526.587592] env[63345]: DEBUG nova.scheduler.client.report [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Updated inventory for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with generation 15 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 526.587804] env[63345]: DEBUG nova.compute.provider_tree [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Updating resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 generation from 15 to 16 during operation: update_inventory {{(pid=63345) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 526.588031] env[63345]: DEBUG nova.compute.provider_tree [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 526.613597] env[63345]: DEBUG nova.network.neutron [req-8d37903e-c63b-4305-9be3-fbe0a708fb36 req-82707745-4385-4b65-83bb-f794055b2c8d service nova] [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 526.691664] env[63345]: DEBUG oslo_vmware.api [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Task: {'id': task-1016621, 'name': PowerOnVM_Task, 'duration_secs': 0.44677} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 526.694230] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] [instance: 51d6db80-9d1f-4e38-a564-f587474f6294] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 526.694743] env[63345]: INFO nova.compute.manager [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] [instance: 51d6db80-9d1f-4e38-a564-f587474f6294] Took 8.65 seconds to spawn the instance on the hypervisor. 
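Note: the repeated 409 "placement.concurrent_update" errors and the eventual "generation from 15 to 16" update above come from placement's generation-guarded writes: every inventory PUT must carry the resource provider generation the caller last saw, and a stale generation is rejected so the caller refreshes and retries. A rough sketch of that loop, assuming a hypothetical placement endpoint and admin token; real services use keystoneauth sessions rather than raw requests.

import requests

PLACEMENT = 'http://placement.example.org'   # hypothetical endpoint
HEADERS = {'X-Auth-Token': 'ADMIN_TOKEN'}    # hypothetical token
RP_UUID = 'fc35ddde-c15e-4ab8-bf77-a06ae0805b57'


def set_inventory(inventories):
    for _ in range(3):  # bounded retry, similar in spirit to the report client
        url = f'{PLACEMENT}/resource_providers/{RP_UUID}/inventories'
        current = requests.get(url, headers=HEADERS).json()
        resp = requests.put(
            url, headers=HEADERS,
            json={'resource_provider_generation':
                      current['resource_provider_generation'],
                  'inventories': inventories})
        if resp.status_code != 409:
            return resp.json()
        # 409 placement.concurrent_update: another writer bumped the generation
        # between our GET and PUT; refresh and try again.
    raise RuntimeError('inventory update kept conflicting')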
[ 526.694822] env[63345]: DEBUG nova.compute.manager [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] [instance: 51d6db80-9d1f-4e38-a564-f587474f6294] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 526.696875] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c14741a-765e-4f70-a75c-3fd9dbc220aa {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.095581] env[63345]: DEBUG oslo_concurrency.lockutils [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.109s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 527.095581] env[63345]: DEBUG nova.compute.manager [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Start building networks asynchronously for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 527.101026] env[63345]: DEBUG oslo_concurrency.lockutils [None req-01122426-3960-4bc5-9640-79ecd48623c9 tempest-TenantUsagesTestJSON-1887303192 tempest-TenantUsagesTestJSON-1887303192-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 8.665s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 527.116290] env[63345]: DEBUG oslo_concurrency.lockutils [req-8d37903e-c63b-4305-9be3-fbe0a708fb36 req-82707745-4385-4b65-83bb-f794055b2c8d service nova] Releasing lock "refresh_cache-f4e897ce-2df5-40ae-99a8-11cac4902588" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 527.116624] env[63345]: DEBUG nova.compute.manager [req-8d37903e-c63b-4305-9be3-fbe0a708fb36 req-82707745-4385-4b65-83bb-f794055b2c8d service nova] [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] Received event network-vif-deleted-7a36bb25-76ac-447e-a3ad-bb832183380e {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 527.160872] env[63345]: ERROR nova.compute.manager [None req-5ea571cd-8f6b-4257-8cc6-0ee9e2154c2b tempest-ServersTestFqdnHostnames-427992595 tempest-ServersTestFqdnHostnames-427992595-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port e75e1c47-4c22-4d2c-8365-bf365e8ee881, please check neutron logs for more information. 
[ 527.160872] env[63345]: ERROR nova.compute.manager Traceback (most recent call last): [ 527.160872] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 527.160872] env[63345]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 527.160872] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 527.160872] env[63345]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 527.160872] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 527.160872] env[63345]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 527.160872] env[63345]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 527.160872] env[63345]: ERROR nova.compute.manager self.force_reraise() [ 527.160872] env[63345]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 527.160872] env[63345]: ERROR nova.compute.manager raise self.value [ 527.160872] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 527.160872] env[63345]: ERROR nova.compute.manager updated_port = self._update_port( [ 527.160872] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 527.160872] env[63345]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 527.161363] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 527.161363] env[63345]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 527.161363] env[63345]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port e75e1c47-4c22-4d2c-8365-bf365e8ee881, please check neutron logs for more information. 
[ 527.161363] env[63345]: ERROR nova.compute.manager [ 527.161363] env[63345]: Traceback (most recent call last): [ 527.161363] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 527.161363] env[63345]: listener.cb(fileno) [ 527.161363] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 527.161363] env[63345]: result = function(*args, **kwargs) [ 527.161363] env[63345]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 527.161363] env[63345]: return func(*args, **kwargs) [ 527.161363] env[63345]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 527.161363] env[63345]: raise e [ 527.161363] env[63345]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 527.161363] env[63345]: nwinfo = self.network_api.allocate_for_instance( [ 527.161363] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 527.161363] env[63345]: created_port_ids = self._update_ports_for_instance( [ 527.161363] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 527.161363] env[63345]: with excutils.save_and_reraise_exception(): [ 527.161363] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 527.161363] env[63345]: self.force_reraise() [ 527.161363] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 527.161363] env[63345]: raise self.value [ 527.161363] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 527.161363] env[63345]: updated_port = self._update_port( [ 527.161363] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 527.161363] env[63345]: _ensure_no_port_binding_failure(port) [ 527.161363] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 527.161363] env[63345]: raise exception.PortBindingFailed(port_id=port['id']) [ 527.162128] env[63345]: nova.exception.PortBindingFailed: Binding failed for port e75e1c47-4c22-4d2c-8365-bf365e8ee881, please check neutron logs for more information. [ 527.162128] env[63345]: Removing descriptor: 15 [ 527.162128] env[63345]: ERROR nova.compute.manager [None req-5ea571cd-8f6b-4257-8cc6-0ee9e2154c2b tempest-ServersTestFqdnHostnames-427992595 tempest-ServersTestFqdnHostnames-427992595-project-member] [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port e75e1c47-4c22-4d2c-8365-bf365e8ee881, please check neutron logs for more information. 
[ 527.162128] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] Traceback (most recent call last): [ 527.162128] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 527.162128] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] yield resources [ 527.162128] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 527.162128] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] self.driver.spawn(context, instance, image_meta, [ 527.162128] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 527.162128] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] self._vmops.spawn(context, instance, image_meta, injected_files, [ 527.162128] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 527.162128] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] vm_ref = self.build_virtual_machine(instance, [ 527.162522] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 527.162522] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] vif_infos = vmwarevif.get_vif_info(self._session, [ 527.162522] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 527.162522] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] for vif in network_info: [ 527.162522] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 527.162522] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] return self._sync_wrapper(fn, *args, **kwargs) [ 527.162522] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 527.162522] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] self.wait() [ 527.162522] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 527.162522] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] self[:] = self._gt.wait() [ 527.162522] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 527.162522] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] return self._exit_event.wait() [ 527.162522] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 527.162872] env[63345]: ERROR 
nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] result = hub.switch() [ 527.162872] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 527.162872] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] return self.greenlet.switch() [ 527.162872] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 527.162872] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] result = function(*args, **kwargs) [ 527.162872] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 527.162872] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] return func(*args, **kwargs) [ 527.162872] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 527.162872] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] raise e [ 527.162872] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 527.162872] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] nwinfo = self.network_api.allocate_for_instance( [ 527.162872] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 527.162872] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] created_port_ids = self._update_ports_for_instance( [ 527.163290] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 527.163290] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] with excutils.save_and_reraise_exception(): [ 527.163290] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 527.163290] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] self.force_reraise() [ 527.163290] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 527.163290] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] raise self.value [ 527.163290] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 527.163290] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] updated_port = self._update_port( [ 527.163290] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 527.163290] 
env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] _ensure_no_port_binding_failure(port) [ 527.163290] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 527.163290] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] raise exception.PortBindingFailed(port_id=port['id']) [ 527.163899] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] nova.exception.PortBindingFailed: Binding failed for port e75e1c47-4c22-4d2c-8365-bf365e8ee881, please check neutron logs for more information. [ 527.163899] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] [ 527.163899] env[63345]: INFO nova.compute.manager [None req-5ea571cd-8f6b-4257-8cc6-0ee9e2154c2b tempest-ServersTestFqdnHostnames-427992595 tempest-ServersTestFqdnHostnames-427992595-project-member] [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] Terminating instance [ 527.204192] env[63345]: DEBUG nova.compute.manager [req-db83319d-223c-45be-afb8-9f54431ce1a7 req-9501221c-146b-4f3e-b3ae-269d68e3a8ed service nova] [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] Received event network-changed-e75e1c47-4c22-4d2c-8365-bf365e8ee881 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 527.204406] env[63345]: DEBUG nova.compute.manager [req-db83319d-223c-45be-afb8-9f54431ce1a7 req-9501221c-146b-4f3e-b3ae-269d68e3a8ed service nova] [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] Refreshing instance network info cache due to event network-changed-e75e1c47-4c22-4d2c-8365-bf365e8ee881. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 527.204633] env[63345]: DEBUG oslo_concurrency.lockutils [req-db83319d-223c-45be-afb8-9f54431ce1a7 req-9501221c-146b-4f3e-b3ae-269d68e3a8ed service nova] Acquiring lock "refresh_cache-c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 527.204776] env[63345]: DEBUG oslo_concurrency.lockutils [req-db83319d-223c-45be-afb8-9f54431ce1a7 req-9501221c-146b-4f3e-b3ae-269d68e3a8ed service nova] Acquired lock "refresh_cache-c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 527.205200] env[63345]: DEBUG nova.network.neutron [req-db83319d-223c-45be-afb8-9f54431ce1a7 req-9501221c-146b-4f3e-b3ae-269d68e3a8ed service nova] [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] Refreshing network info cache for port e75e1c47-4c22-4d2c-8365-bf365e8ee881 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 527.213430] env[63345]: INFO nova.compute.manager [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] [instance: 51d6db80-9d1f-4e38-a564-f587474f6294] Took 13.48 seconds to build instance. 
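
The traceback above bottoms out in _ensure_no_port_binding_failure (nova/network/neutron.py:294) raising nova.exception.PortBindingFailed for port e75e1c47-4c22-4d2c-8365-bf365e8ee881. Below is a minimal, self-contained sketch of that failure check; it assumes Neutron flags a failed binding by setting binding:vif_type to "binding_failed" on the port, and apart from the exception name and message the identifiers are illustrative, not Nova's actual code.

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs "
                "for more information." % port_id)

    def _ensure_no_port_binding_failure(port):
        # Abort the build as soon as Neutron reports it could not bind the
        # port; spawning a VM with an unusable VIF would only fail later.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    if __name__ == '__main__':
        failed_port = {'id': 'e75e1c47-4c22-4d2c-8365-bf365e8ee881',
                       'binding:vif_type': 'binding_failed'}
        try:
            _ensure_no_port_binding_failure(failed_port)
        except PortBindingFailed as exc:
            print(exc)

In the run above, the compute manager reacts to this exception by terminating the instance and deallocating its (empty) network info, which is what the subsequent refresh_cache-c37b5d05-... entries are doing.
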
[ 527.614891] env[63345]: DEBUG nova.compute.utils [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 527.617464] env[63345]: DEBUG nova.compute.manager [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Not allocating networking since 'none' was specified. {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1983}} [ 527.667860] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5ea571cd-8f6b-4257-8cc6-0ee9e2154c2b tempest-ServersTestFqdnHostnames-427992595 tempest-ServersTestFqdnHostnames-427992595-project-member] Acquiring lock "refresh_cache-c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 527.715472] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0aedce8b-6a95-4014-84a2-a7d27f7e8406 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Lock "51d6db80-9d1f-4e38-a564-f587474f6294" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.988s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 527.716841] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Lock "51d6db80-9d1f-4e38-a564-f587474f6294" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 10.640s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 527.717361] env[63345]: INFO nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 51d6db80-9d1f-4e38-a564-f587474f6294] During sync_power_state the instance has a pending task (networking). Skip. [ 527.717545] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Lock "51d6db80-9d1f-4e38-a564-f587474f6294" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 527.740726] env[63345]: DEBUG nova.network.neutron [req-db83319d-223c-45be-afb8-9f54431ce1a7 req-9501221c-146b-4f3e-b3ae-269d68e3a8ed service nova] [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 527.805715] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d33f830a-a9cb-4941-916f-a4b7f8215f74 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.813713] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9471a697-7a91-4a36-98bf-951b1a3081ed {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.849669] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7e2fd59-40df-432c-8ca9-7f9d1568f5ca {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.857487] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57da4709-d34e-4910-98bb-6d1912123930 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.871055] env[63345]: DEBUG nova.compute.provider_tree [None req-01122426-3960-4bc5-9640-79ecd48623c9 tempest-TenantUsagesTestJSON-1887303192 tempest-TenantUsagesTestJSON-1887303192-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 527.891180] env[63345]: DEBUG nova.network.neutron [req-db83319d-223c-45be-afb8-9f54431ce1a7 req-9501221c-146b-4f3e-b3ae-269d68e3a8ed service nova] [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 527.914080] env[63345]: DEBUG nova.compute.manager [None req-00506ee5-66cd-40b2-bd6d-3004cc0ecb83 tempest-ServerDiagnosticsV248Test-1711407586 tempest-ServerDiagnosticsV248Test-1711407586-project-admin] [instance: 51d6db80-9d1f-4e38-a564-f587474f6294] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 527.915979] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5877ed91-8383-41fc-823b-1eec3c709fba {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.924614] env[63345]: INFO nova.compute.manager [None req-00506ee5-66cd-40b2-bd6d-3004cc0ecb83 tempest-ServerDiagnosticsV248Test-1711407586 tempest-ServerDiagnosticsV248Test-1711407586-project-admin] [instance: 51d6db80-9d1f-4e38-a564-f587474f6294] Retrieving diagnostics [ 527.925483] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57776743-b0ab-48b1-be0e-c226c2171e0c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.119638] env[63345]: DEBUG nova.compute.manager [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Start building block device mappings for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 528.221659] env[63345]: DEBUG nova.compute.manager [None req-fed8ccca-f158-4dc4-bc3d-351314b383ad tempest-VolumesAssistedSnapshotsTest-639887845 tempest-VolumesAssistedSnapshotsTest-639887845-project-member] [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 528.376019] env[63345]: DEBUG nova.scheduler.client.report [None req-01122426-3960-4bc5-9640-79ecd48623c9 tempest-TenantUsagesTestJSON-1887303192 tempest-TenantUsagesTestJSON-1887303192-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 528.395474] env[63345]: DEBUG oslo_concurrency.lockutils [req-db83319d-223c-45be-afb8-9f54431ce1a7 req-9501221c-146b-4f3e-b3ae-269d68e3a8ed service nova] Releasing lock "refresh_cache-c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 528.395474] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5ea571cd-8f6b-4257-8cc6-0ee9e2154c2b tempest-ServersTestFqdnHostnames-427992595 tempest-ServersTestFqdnHostnames-427992595-project-member] Acquired lock "refresh_cache-c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 528.395474] env[63345]: DEBUG nova.network.neutron [None req-5ea571cd-8f6b-4257-8cc6-0ee9e2154c2b tempest-ServersTestFqdnHostnames-427992595 tempest-ServersTestFqdnHostnames-427992595-project-member] [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 528.755744] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fed8ccca-f158-4dc4-bc3d-351314b383ad tempest-VolumesAssistedSnapshotsTest-639887845 tempest-VolumesAssistedSnapshotsTest-639887845-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 528.881767] env[63345]: DEBUG oslo_concurrency.lockutils [None req-01122426-3960-4bc5-9640-79ecd48623c9 tempest-TenantUsagesTestJSON-1887303192 tempest-TenantUsagesTestJSON-1887303192-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.782s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 528.883296] env[63345]: ERROR nova.compute.manager [None req-01122426-3960-4bc5-9640-79ecd48623c9 tempest-TenantUsagesTestJSON-1887303192 tempest-TenantUsagesTestJSON-1887303192-project-member] [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port b116df82-1473-4a0c-9d2c-585e5c778551, please check neutron logs for more 
information. [ 528.883296] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] Traceback (most recent call last): [ 528.883296] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 528.883296] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] self.driver.spawn(context, instance, image_meta, [ 528.883296] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 528.883296] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] self._vmops.spawn(context, instance, image_meta, injected_files, [ 528.883296] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 528.883296] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] vm_ref = self.build_virtual_machine(instance, [ 528.883296] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 528.883296] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] vif_infos = vmwarevif.get_vif_info(self._session, [ 528.883296] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 528.883756] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] for vif in network_info: [ 528.883756] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 528.883756] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] return self._sync_wrapper(fn, *args, **kwargs) [ 528.883756] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 528.883756] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] self.wait() [ 528.883756] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 528.883756] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] self[:] = self._gt.wait() [ 528.883756] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 528.883756] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] return self._exit_event.wait() [ 528.883756] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 528.883756] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] result = hub.switch() [ 528.883756] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in 
switch [ 528.883756] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] return self.greenlet.switch() [ 528.884167] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 528.884167] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] result = function(*args, **kwargs) [ 528.884167] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 528.884167] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] return func(*args, **kwargs) [ 528.884167] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 528.884167] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] raise e [ 528.884167] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 528.884167] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] nwinfo = self.network_api.allocate_for_instance( [ 528.884167] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 528.884167] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] created_port_ids = self._update_ports_for_instance( [ 528.884167] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 528.884167] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] with excutils.save_and_reraise_exception(): [ 528.884167] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 528.884505] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] self.force_reraise() [ 528.884505] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 528.884505] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] raise self.value [ 528.884505] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 528.884505] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] updated_port = self._update_port( [ 528.884505] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 528.884505] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] _ensure_no_port_binding_failure(port) [ 528.884505] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 528.884505] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] raise exception.PortBindingFailed(port_id=port['id']) [ 528.884505] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] nova.exception.PortBindingFailed: Binding failed for port b116df82-1473-4a0c-9d2c-585e5c778551, please check neutron logs for more information. [ 528.884505] env[63345]: ERROR nova.compute.manager [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] [ 528.884794] env[63345]: DEBUG nova.compute.utils [None req-01122426-3960-4bc5-9640-79ecd48623c9 tempest-TenantUsagesTestJSON-1887303192 tempest-TenantUsagesTestJSON-1887303192-project-member] [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] Binding failed for port b116df82-1473-4a0c-9d2c-585e5c778551, please check neutron logs for more information. {{(pid=63345) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 528.885016] env[63345]: DEBUG oslo_concurrency.lockutils [None req-358d58e2-d3f0-40f3-a40e-86b8333d3d19 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 8.912s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 528.898853] env[63345]: DEBUG nova.compute.manager [None req-01122426-3960-4bc5-9640-79ecd48623c9 tempest-TenantUsagesTestJSON-1887303192 tempest-TenantUsagesTestJSON-1887303192-project-member] [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] Build of instance d35db4e4-b25c-4811-a93f-cd337f6f9142 was re-scheduled: Binding failed for port b116df82-1473-4a0c-9d2c-585e5c778551, please check neutron logs for more information. 
{{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 528.902202] env[63345]: DEBUG nova.compute.manager [None req-01122426-3960-4bc5-9640-79ecd48623c9 tempest-TenantUsagesTestJSON-1887303192 tempest-TenantUsagesTestJSON-1887303192-project-member] [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] Unplugging VIFs for instance {{(pid=63345) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 528.902202] env[63345]: DEBUG oslo_concurrency.lockutils [None req-01122426-3960-4bc5-9640-79ecd48623c9 tempest-TenantUsagesTestJSON-1887303192 tempest-TenantUsagesTestJSON-1887303192-project-member] Acquiring lock "refresh_cache-d35db4e4-b25c-4811-a93f-cd337f6f9142" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 528.902202] env[63345]: DEBUG oslo_concurrency.lockutils [None req-01122426-3960-4bc5-9640-79ecd48623c9 tempest-TenantUsagesTestJSON-1887303192 tempest-TenantUsagesTestJSON-1887303192-project-member] Acquired lock "refresh_cache-d35db4e4-b25c-4811-a93f-cd337f6f9142" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 528.902202] env[63345]: DEBUG nova.network.neutron [None req-01122426-3960-4bc5-9640-79ecd48623c9 tempest-TenantUsagesTestJSON-1887303192 tempest-TenantUsagesTestJSON-1887303192-project-member] [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 528.932170] env[63345]: DEBUG nova.network.neutron [None req-5ea571cd-8f6b-4257-8cc6-0ee9e2154c2b tempest-ServersTestFqdnHostnames-427992595 tempest-ServersTestFqdnHostnames-427992595-project-member] [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 529.030515] env[63345]: DEBUG nova.network.neutron [None req-5ea571cd-8f6b-4257-8cc6-0ee9e2154c2b tempest-ServersTestFqdnHostnames-427992595 tempest-ServersTestFqdnHostnames-427992595-project-member] [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 529.131965] env[63345]: DEBUG nova.compute.manager [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Start spawning the instance on the hypervisor. 
{{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 529.180159] env[63345]: DEBUG nova.virt.hardware [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 529.180451] env[63345]: DEBUG nova.virt.hardware [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 529.180611] env[63345]: DEBUG nova.virt.hardware [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 529.180792] env[63345]: DEBUG nova.virt.hardware [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 529.180932] env[63345]: DEBUG nova.virt.hardware [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 529.181091] env[63345]: DEBUG nova.virt.hardware [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 529.181614] env[63345]: DEBUG nova.virt.hardware [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 529.181614] env[63345]: DEBUG nova.virt.hardware [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 529.181614] env[63345]: DEBUG nova.virt.hardware [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 
tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 529.181810] env[63345]: DEBUG nova.virt.hardware [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 529.181928] env[63345]: DEBUG nova.virt.hardware [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 529.183688] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ca1ca3b-d311-4e2e-8a70-5ccd63f40eb3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.197166] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cf26411-1457-440a-9f09-548fb3b5f94c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.217348] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Instance VIF info [] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 529.225031] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Creating folder: Project (48657968f7bd40d590ff347916aa027a). Parent ref: group-v225918. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 529.225441] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-470f4a21-3d4d-4476-b725-f661b2dc2c70 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.236324] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Created folder: Project (48657968f7bd40d590ff347916aa027a) in parent group-v225918. [ 529.237473] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Creating folder: Instances. Parent ref: group-v225922. 
{{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 529.237473] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6456071d-76c5-4b4d-890c-8a6b5fcc6abc {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.245576] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Created folder: Instances in parent group-v225922. [ 529.245771] env[63345]: DEBUG oslo.service.loopingcall [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 529.246057] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 529.246279] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-be61e48a-cb08-4ba3-819c-65e9c82f3f88 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.270691] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 529.270691] env[63345]: value = "task-1016624" [ 529.270691] env[63345]: _type = "Task" [ 529.270691] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 529.280858] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016624, 'name': CreateVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 529.418883] env[63345]: DEBUG nova.network.neutron [None req-01122426-3960-4bc5-9640-79ecd48623c9 tempest-TenantUsagesTestJSON-1887303192 tempest-TenantUsagesTestJSON-1887303192-project-member] [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 529.503052] env[63345]: DEBUG nova.network.neutron [None req-01122426-3960-4bc5-9640-79ecd48623c9 tempest-TenantUsagesTestJSON-1887303192 tempest-TenantUsagesTestJSON-1887303192-project-member] [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 529.536452] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5ea571cd-8f6b-4257-8cc6-0ee9e2154c2b tempest-ServersTestFqdnHostnames-427992595 tempest-ServersTestFqdnHostnames-427992595-project-member] Releasing lock "refresh_cache-c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 529.536452] env[63345]: DEBUG nova.compute.manager [None req-5ea571cd-8f6b-4257-8cc6-0ee9e2154c2b tempest-ServersTestFqdnHostnames-427992595 tempest-ServersTestFqdnHostnames-427992595-project-member] [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] Start destroying the instance on the hypervisor. 
{{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 529.536452] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-5ea571cd-8f6b-4257-8cc6-0ee9e2154c2b tempest-ServersTestFqdnHostnames-427992595 tempest-ServersTestFqdnHostnames-427992595-project-member] [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 529.537113] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f6b94062-86d0-43a9-be89-c0528f1c8c78 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.545680] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d2c338b-5485-44ef-9d38-ca69c31a9cae {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.571461] env[63345]: WARNING nova.virt.vmwareapi.vmops [None req-5ea571cd-8f6b-4257-8cc6-0ee9e2154c2b tempest-ServersTestFqdnHostnames-427992595 tempest-ServersTestFqdnHostnames-427992595-project-member] [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65 could not be found. [ 529.571649] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-5ea571cd-8f6b-4257-8cc6-0ee9e2154c2b tempest-ServersTestFqdnHostnames-427992595 tempest-ServersTestFqdnHostnames-427992595-project-member] [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 529.571848] env[63345]: INFO nova.compute.manager [None req-5ea571cd-8f6b-4257-8cc6-0ee9e2154c2b tempest-ServersTestFqdnHostnames-427992595 tempest-ServersTestFqdnHostnames-427992595-project-member] [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] Took 0.04 seconds to destroy the instance on the hypervisor. [ 529.572104] env[63345]: DEBUG oslo.service.loopingcall [None req-5ea571cd-8f6b-4257-8cc6-0ee9e2154c2b tempest-ServersTestFqdnHostnames-427992595 tempest-ServersTestFqdnHostnames-427992595-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 529.572413] env[63345]: DEBUG nova.compute.manager [-] [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 529.572413] env[63345]: DEBUG nova.network.neutron [-] [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 529.603907] env[63345]: DEBUG nova.network.neutron [-] [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 529.637921] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1507b768-033d-45f2-b6c7-e70b5a9a16a6 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.646632] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9ca3e79-1e62-4578-a273-207910eacefd {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.678575] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7466c0be-e9c1-4227-aa52-998f571ca5bb {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.686278] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-208f078f-869c-4bb9-a826-8fdbf5f42e9a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.701031] env[63345]: DEBUG nova.compute.provider_tree [None req-358d58e2-d3f0-40f3-a40e-86b8333d3d19 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 529.705884] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d45e143f-5f98-4d14-bf6e-59c328ff4693 tempest-ServersWithSpecificFlavorTestJSON-889674138 tempest-ServersWithSpecificFlavorTestJSON-889674138-project-member] Acquiring lock "d467124f-0b2b-4108-90d1-40f149e55ff0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 529.706156] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d45e143f-5f98-4d14-bf6e-59c328ff4693 tempest-ServersWithSpecificFlavorTestJSON-889674138 tempest-ServersWithSpecificFlavorTestJSON-889674138-project-member] Lock "d467124f-0b2b-4108-90d1-40f149e55ff0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 529.784237] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016624, 'name': CreateVM_Task, 'duration_secs': 0.293201} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 529.784237] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 529.784237] env[63345]: DEBUG oslo_concurrency.lockutils [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 529.784237] env[63345]: DEBUG oslo_concurrency.lockutils [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 529.784237] env[63345]: DEBUG oslo_concurrency.lockutils [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 529.784237] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-268f13a2-36cb-4195-9940-a819da223298 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.790606] env[63345]: DEBUG oslo_vmware.api [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Waiting for the task: (returnval){ [ 529.790606] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52339980-64d7-5ea1-ab4a-d3e92aa77dfc" [ 529.790606] env[63345]: _type = "Task" [ 529.790606] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 529.798347] env[63345]: DEBUG oslo_vmware.api [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52339980-64d7-5ea1-ab4a-d3e92aa77dfc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 530.007721] env[63345]: DEBUG oslo_concurrency.lockutils [None req-01122426-3960-4bc5-9640-79ecd48623c9 tempest-TenantUsagesTestJSON-1887303192 tempest-TenantUsagesTestJSON-1887303192-project-member] Releasing lock "refresh_cache-d35db4e4-b25c-4811-a93f-cd337f6f9142" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 530.007970] env[63345]: DEBUG nova.compute.manager [None req-01122426-3960-4bc5-9640-79ecd48623c9 tempest-TenantUsagesTestJSON-1887303192 tempest-TenantUsagesTestJSON-1887303192-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=63345) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 530.008186] env[63345]: DEBUG nova.compute.manager [None req-01122426-3960-4bc5-9640-79ecd48623c9 tempest-TenantUsagesTestJSON-1887303192 tempest-TenantUsagesTestJSON-1887303192-project-member] [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 530.008374] env[63345]: DEBUG nova.network.neutron [None req-01122426-3960-4bc5-9640-79ecd48623c9 tempest-TenantUsagesTestJSON-1887303192 tempest-TenantUsagesTestJSON-1887303192-project-member] [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 530.028266] env[63345]: DEBUG nova.network.neutron [None req-01122426-3960-4bc5-9640-79ecd48623c9 tempest-TenantUsagesTestJSON-1887303192 tempest-TenantUsagesTestJSON-1887303192-project-member] [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 530.106220] env[63345]: DEBUG nova.network.neutron [-] [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 530.207048] env[63345]: DEBUG nova.scheduler.client.report [None req-358d58e2-d3f0-40f3-a40e-86b8333d3d19 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 530.303191] env[63345]: DEBUG oslo_vmware.api [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52339980-64d7-5ea1-ab4a-d3e92aa77dfc, 'name': SearchDatastore_Task, 'duration_secs': 0.009193} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 530.303511] env[63345]: DEBUG oslo_concurrency.lockutils [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 530.303744] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 530.303972] env[63345]: DEBUG oslo_concurrency.lockutils [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 530.304136] env[63345]: DEBUG oslo_concurrency.lockutils [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 530.304335] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 530.304612] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-91dc7698-d0ff-49b2-8439-b0e4bdfb52a9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.312634] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 530.312634] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 530.313354] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-68498297-8107-449e-9114-9ebfcd947150 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.318281] env[63345]: DEBUG oslo_vmware.api [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Waiting for the task: (returnval){ [ 530.318281] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]526b588f-08ba-ed51-792a-d2028e998cca" [ 530.318281] env[63345]: _type = "Task" [ 530.318281] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 530.323286] env[63345]: DEBUG nova.compute.manager [req-bbbe958a-33d5-4bd9-8ed4-792e14346ed4 req-a18fa009-7be1-4b22-9b42-39b9bef9520a service nova] [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] Received event network-vif-deleted-e75e1c47-4c22-4d2c-8365-bf365e8ee881 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 530.330804] env[63345]: DEBUG oslo_vmware.api [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]526b588f-08ba-ed51-792a-d2028e998cca, 'name': SearchDatastore_Task, 'duration_secs': 0.007575} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 530.331536] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b9650745-8405-42b0-87aa-832829bf08f8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.338543] env[63345]: DEBUG oslo_vmware.api [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Waiting for the task: (returnval){ [ 530.338543] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52f02761-dccf-8985-ffcb-d3f35c8cfa97" [ 530.338543] env[63345]: _type = "Task" [ 530.338543] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 530.351014] env[63345]: DEBUG oslo_vmware.api [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52f02761-dccf-8985-ffcb-d3f35c8cfa97, 'name': SearchDatastore_Task, 'duration_secs': 0.008809} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 530.351014] env[63345]: DEBUG oslo_concurrency.lockutils [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 530.351014] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 5ef55aca-0714-4b34-85f2-b6d53f97c2d0/5ef55aca-0714-4b34-85f2-b6d53f97c2d0.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 530.352670] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-86b32915-8220-4009-832e-d4c16758b3f4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.358824] env[63345]: DEBUG oslo_vmware.api [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Waiting for the task: (returnval){ [ 530.358824] env[63345]: value = "task-1016625" [ 530.358824] env[63345]: _type = "Task" [ 530.358824] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 530.366825] env[63345]: DEBUG oslo_vmware.api [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Task: {'id': task-1016625, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 530.532992] env[63345]: DEBUG nova.network.neutron [None req-01122426-3960-4bc5-9640-79ecd48623c9 tempest-TenantUsagesTestJSON-1887303192 tempest-TenantUsagesTestJSON-1887303192-project-member] [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 530.610808] env[63345]: INFO nova.compute.manager [-] [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] Took 1.04 seconds to deallocate network for instance. 
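
The CreateVM_Task, SearchDatastore_Task and CopyVirtualDisk_Task entries above all follow the same pattern: the driver invokes a vCenter method, receives a task handle, and oslo_vmware.api waits on it (wait_for_task at api.py:397, _poll_task at api.py:434), logging progress and finally duration_secs. The snippet below is a simplified, self-contained mimic of that polling loop; FakeTask and the helper name are made up for illustration and are not the oslo.vmware API.

    import time

    class FakeTask:
        """Stand-in for a vCenter task handle; reports success after a few polls."""
        def __init__(self, name, polls_until_done=3):
            self.name = name
            self._polls_left = polls_until_done

        def poll(self):
            self._polls_left -= 1
            if self._polls_left <= 0:
                return {'state': 'success', 'progress': 100}
            return {'state': 'running',
                    'progress': int(100 * (1 - self._polls_left / 3))}

    def wait_for_fake_task(task, interval=0.2):
        # Poll until the task reports success, mirroring the
        # "... progress is 0%" / "completed successfully" lines in the log.
        start = time.monotonic()
        while True:
            info = task.poll()
            if info['state'] == 'success':
                return {'id': task.name,
                        'duration_secs': round(time.monotonic() - start, 6)}
            print("Task %s progress is %d%%" % (task.name, info['progress']))
            time.sleep(interval)

    if __name__ == '__main__':
        result = wait_for_fake_task(FakeTask('task-1016625'))
        print("Task %(id)s completed successfully, duration_secs=%(duration_secs)s"
              % result)
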
[ 530.613537] env[63345]: DEBUG nova.compute.claims [None req-5ea571cd-8f6b-4257-8cc6-0ee9e2154c2b tempest-ServersTestFqdnHostnames-427992595 tempest-ServersTestFqdnHostnames-427992595-project-member] [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] Aborting claim: {{(pid=63345) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 530.613868] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5ea571cd-8f6b-4257-8cc6-0ee9e2154c2b tempest-ServersTestFqdnHostnames-427992595 tempest-ServersTestFqdnHostnames-427992595-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 530.712571] env[63345]: DEBUG oslo_concurrency.lockutils [None req-358d58e2-d3f0-40f3-a40e-86b8333d3d19 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.827s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 530.715197] env[63345]: ERROR nova.compute.manager [None req-358d58e2-d3f0-40f3-a40e-86b8333d3d19 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 1bb7a991-b363-43c0-8650-31586ccda3de, please check neutron logs for more information. [ 530.715197] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] Traceback (most recent call last): [ 530.715197] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 530.715197] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] self.driver.spawn(context, instance, image_meta, [ 530.715197] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 530.715197] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] self._vmops.spawn(context, instance, image_meta, injected_files, [ 530.715197] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 530.715197] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] vm_ref = self.build_virtual_machine(instance, [ 530.715197] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 530.715197] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] vif_infos = vmwarevif.get_vif_info(self._session, [ 530.715197] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 530.716182] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] for vif in network_info: [ 530.716182] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 530.716182] 
env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] return self._sync_wrapper(fn, *args, **kwargs) [ 530.716182] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 530.716182] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] self.wait() [ 530.716182] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 530.716182] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] self[:] = self._gt.wait() [ 530.716182] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 530.716182] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] return self._exit_event.wait() [ 530.716182] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 530.716182] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] result = hub.switch() [ 530.716182] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 530.716182] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] return self.greenlet.switch() [ 530.716523] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 530.716523] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] result = function(*args, **kwargs) [ 530.716523] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 530.716523] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] return func(*args, **kwargs) [ 530.716523] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 530.716523] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] raise e [ 530.716523] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 530.716523] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] nwinfo = self.network_api.allocate_for_instance( [ 530.716523] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 530.716523] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] created_port_ids = self._update_ports_for_instance( [ 530.716523] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 530.716523] env[63345]: ERROR 
nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] with excutils.save_and_reraise_exception(): [ 530.716523] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 530.716846] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] self.force_reraise() [ 530.716846] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 530.716846] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] raise self.value [ 530.716846] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 530.716846] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] updated_port = self._update_port( [ 530.716846] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 530.716846] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] _ensure_no_port_binding_failure(port) [ 530.716846] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 530.716846] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] raise exception.PortBindingFailed(port_id=port['id']) [ 530.716846] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] nova.exception.PortBindingFailed: Binding failed for port 1bb7a991-b363-43c0-8650-31586ccda3de, please check neutron logs for more information. [ 530.716846] env[63345]: ERROR nova.compute.manager [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] [ 530.717128] env[63345]: DEBUG nova.compute.utils [None req-358d58e2-d3f0-40f3-a40e-86b8333d3d19 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] Binding failed for port 1bb7a991-b363-43c0-8650-31586ccda3de, please check neutron logs for more information. 
{{(pid=63345) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 530.717128] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a097932b-60f9-4384-b1fe-e2686fbffa04 tempest-ServersAdminNegativeTestJSON-1806261786 tempest-ServersAdminNegativeTestJSON-1806261786-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.149s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 530.719398] env[63345]: INFO nova.compute.claims [None req-a097932b-60f9-4384-b1fe-e2686fbffa04 tempest-ServersAdminNegativeTestJSON-1806261786 tempest-ServersAdminNegativeTestJSON-1806261786-project-member] [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 530.722356] env[63345]: DEBUG nova.compute.manager [None req-358d58e2-d3f0-40f3-a40e-86b8333d3d19 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] Build of instance a6858a79-06b8-4110-9da4-e0e2a4a4e830 was re-scheduled: Binding failed for port 1bb7a991-b363-43c0-8650-31586ccda3de, please check neutron logs for more information. {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 530.723395] env[63345]: DEBUG nova.compute.manager [None req-358d58e2-d3f0-40f3-a40e-86b8333d3d19 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] Unplugging VIFs for instance {{(pid=63345) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 530.723395] env[63345]: DEBUG oslo_concurrency.lockutils [None req-358d58e2-d3f0-40f3-a40e-86b8333d3d19 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Acquiring lock "refresh_cache-a6858a79-06b8-4110-9da4-e0e2a4a4e830" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 530.723395] env[63345]: DEBUG oslo_concurrency.lockutils [None req-358d58e2-d3f0-40f3-a40e-86b8333d3d19 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Acquired lock "refresh_cache-a6858a79-06b8-4110-9da4-e0e2a4a4e830" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 530.723395] env[63345]: DEBUG nova.network.neutron [None req-358d58e2-d3f0-40f3-a40e-86b8333d3d19 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 530.870170] env[63345]: DEBUG oslo_vmware.api [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Task: {'id': task-1016625, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.480301} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 530.872307] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 5ef55aca-0714-4b34-85f2-b6d53f97c2d0/5ef55aca-0714-4b34-85f2-b6d53f97c2d0.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 530.872606] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 530.872869] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-305901f0-6088-45d6-a7d2-0e23a4e23b69 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.879308] env[63345]: DEBUG oslo_vmware.api [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Waiting for the task: (returnval){ [ 530.879308] env[63345]: value = "task-1016626" [ 530.879308] env[63345]: _type = "Task" [ 530.879308] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 530.889393] env[63345]: DEBUG oslo_vmware.api [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Task: {'id': task-1016626, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 531.037422] env[63345]: INFO nova.compute.manager [None req-01122426-3960-4bc5-9640-79ecd48623c9 tempest-TenantUsagesTestJSON-1887303192 tempest-TenantUsagesTestJSON-1887303192-project-member] [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] Took 1.03 seconds to deallocate network for instance. [ 531.274066] env[63345]: DEBUG nova.network.neutron [None req-358d58e2-d3f0-40f3-a40e-86b8333d3d19 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 531.390083] env[63345]: DEBUG oslo_vmware.api [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Task: {'id': task-1016626, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067316} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 531.390402] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 531.391614] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53b039fe-1c9d-48e2-9501-283055bb08b0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.418841] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Reconfiguring VM instance instance-00000007 to attach disk [datastore2] 5ef55aca-0714-4b34-85f2-b6d53f97c2d0/5ef55aca-0714-4b34-85f2-b6d53f97c2d0.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 531.419698] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7d001ef0-cd90-4e19-b809-e7184ac983dd {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.442804] env[63345]: DEBUG oslo_vmware.api [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Waiting for the task: (returnval){ [ 531.442804] env[63345]: value = "task-1016627" [ 531.442804] env[63345]: _type = "Task" [ 531.442804] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 531.452132] env[63345]: DEBUG oslo_vmware.api [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Task: {'id': task-1016627, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 531.665456] env[63345]: DEBUG nova.network.neutron [None req-358d58e2-d3f0-40f3-a40e-86b8333d3d19 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 531.961203] env[63345]: DEBUG oslo_vmware.api [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Task: {'id': task-1016627, 'name': ReconfigVM_Task, 'duration_secs': 0.270082} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 531.961496] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Reconfigured VM instance instance-00000007 to attach disk [datastore2] 5ef55aca-0714-4b34-85f2-b6d53f97c2d0/5ef55aca-0714-4b34-85f2-b6d53f97c2d0.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 531.965923] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9946d757-c6c3-427f-8832-12de65e5e570 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.972345] env[63345]: DEBUG oslo_vmware.api [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Waiting for the task: (returnval){ [ 531.972345] env[63345]: value = "task-1016628" [ 531.972345] env[63345]: _type = "Task" [ 531.972345] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 531.984416] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f198e364-e4b5-4cf2-91d5-5b5fb973951e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.990541] env[63345]: DEBUG oslo_vmware.api [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Task: {'id': task-1016628, 'name': Rename_Task} progress is 5%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 531.995532] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4054ed49-581e-4e95-b18c-9ff23113902a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.035992] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab046165-a131-489f-a874-474774f9ad7e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.047160] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98d0f2c9-61d6-418c-96cd-051fd0876708 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.065484] env[63345]: DEBUG nova.compute.provider_tree [None req-a097932b-60f9-4384-b1fe-e2686fbffa04 tempest-ServersAdminNegativeTestJSON-1806261786 tempest-ServersAdminNegativeTestJSON-1806261786-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 532.090544] env[63345]: INFO nova.scheduler.client.report [None req-01122426-3960-4bc5-9640-79ecd48623c9 tempest-TenantUsagesTestJSON-1887303192 tempest-TenantUsagesTestJSON-1887303192-project-member] Deleted allocations for instance d35db4e4-b25c-4811-a93f-cd337f6f9142 [ 532.169021] env[63345]: DEBUG oslo_concurrency.lockutils [None req-358d58e2-d3f0-40f3-a40e-86b8333d3d19 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Releasing lock "refresh_cache-a6858a79-06b8-4110-9da4-e0e2a4a4e830" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 532.169345] env[63345]: DEBUG nova.compute.manager [None req-358d58e2-d3f0-40f3-a40e-86b8333d3d19 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=63345) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 532.169543] env[63345]: DEBUG nova.compute.manager [None req-358d58e2-d3f0-40f3-a40e-86b8333d3d19 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 532.169710] env[63345]: DEBUG nova.network.neutron [None req-358d58e2-d3f0-40f3-a40e-86b8333d3d19 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 532.229161] env[63345]: DEBUG nova.network.neutron [None req-358d58e2-d3f0-40f3-a40e-86b8333d3d19 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 532.420645] env[63345]: DEBUG oslo_concurrency.lockutils [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Acquiring lock "175ede99-48e4-43dc-b563-140f42244c97" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 532.420800] env[63345]: DEBUG oslo_concurrency.lockutils [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Lock "175ede99-48e4-43dc-b563-140f42244c97" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 532.460134] env[63345]: DEBUG oslo_concurrency.lockutils [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Acquiring lock "d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 532.460373] env[63345]: DEBUG oslo_concurrency.lockutils [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Lock "d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 532.492933] env[63345]: DEBUG oslo_vmware.api [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Task: {'id': task-1016628, 'name': Rename_Task, 'duration_secs': 0.124016} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 532.493305] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 532.493725] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bab1b815-6e4f-4fec-952b-bdd72557dcbf {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.502563] env[63345]: DEBUG oslo_vmware.api [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Waiting for the task: (returnval){ [ 532.502563] env[63345]: value = "task-1016629" [ 532.502563] env[63345]: _type = "Task" [ 532.502563] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 532.513428] env[63345]: DEBUG oslo_vmware.api [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Task: {'id': task-1016629, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 532.570440] env[63345]: DEBUG nova.scheduler.client.report [None req-a097932b-60f9-4384-b1fe-e2686fbffa04 tempest-ServersAdminNegativeTestJSON-1806261786 tempest-ServersAdminNegativeTestJSON-1806261786-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 532.603490] env[63345]: DEBUG oslo_concurrency.lockutils [None req-01122426-3960-4bc5-9640-79ecd48623c9 tempest-TenantUsagesTestJSON-1887303192 tempest-TenantUsagesTestJSON-1887303192-project-member] Lock "d35db4e4-b25c-4811-a93f-cd337f6f9142" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.721s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 532.606581] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Lock "d35db4e4-b25c-4811-a93f-cd337f6f9142" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 15.529s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 532.606581] env[63345]: INFO nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: d35db4e4-b25c-4811-a93f-cd337f6f9142] During sync_power_state the instance has a pending task (spawning). Skip. [ 532.606581] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Lock "d35db4e4-b25c-4811-a93f-cd337f6f9142" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 532.735775] env[63345]: DEBUG nova.network.neutron [None req-358d58e2-d3f0-40f3-a40e-86b8333d3d19 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 533.012698] env[63345]: DEBUG oslo_vmware.api [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Task: {'id': task-1016629, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 533.076824] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a097932b-60f9-4384-b1fe-e2686fbffa04 tempest-ServersAdminNegativeTestJSON-1806261786 tempest-ServersAdminNegativeTestJSON-1806261786-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.361s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 533.077427] env[63345]: DEBUG nova.compute.manager [None req-a097932b-60f9-4384-b1fe-e2686fbffa04 tempest-ServersAdminNegativeTestJSON-1806261786 tempest-ServersAdminNegativeTestJSON-1806261786-project-member] [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] Start building networks asynchronously for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 533.080713] env[63345]: DEBUG oslo_concurrency.lockutils [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.506s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 533.084393] env[63345]: INFO nova.compute.claims [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 533.106529] env[63345]: DEBUG nova.compute.manager [None req-d45e143f-5f98-4d14-bf6e-59c328ff4693 tempest-ServersWithSpecificFlavorTestJSON-889674138 tempest-ServersWithSpecificFlavorTestJSON-889674138-project-member] [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 533.239398] env[63345]: INFO nova.compute.manager [None req-358d58e2-d3f0-40f3-a40e-86b8333d3d19 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] Took 1.07 seconds to deallocate network for instance. [ 533.513980] env[63345]: DEBUG oslo_vmware.api [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Task: {'id': task-1016629, 'name': PowerOnVM_Task, 'duration_secs': 0.988358} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 533.515182] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 533.515546] env[63345]: INFO nova.compute.manager [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Took 4.38 seconds to spawn the instance on the hypervisor. 
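The PortBindingFailed traceback earlier in this section bottoms out in _ensure_no_port_binding_failure (nova/network/neutron.py), which rejects any port whose binding came back failed; the surrounding records then show the claim being aborted, the build re-scheduled and the network deallocated. A simplified sketch of that check follows; the exception class is stubbed here for illustration, and the real helper lives on the Neutron API module named in the traceback.

    # Simplified sketch: a port whose binding:vif_type is 'binding_failed'
    # aborts the build with PortBindingFailed, as in the traceback above.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                f"Binding failed for port {port_id}, "
                "please check neutron logs for more information.")

    def ensure_no_port_binding_failure(port):
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    port = {'id': '1bb7a991-b363-43c0-8650-31586ccda3de',   # the failed port above
            'binding:vif_type': 'binding_failed'}
    try:
        ensure_no_port_binding_failure(port)
    except PortBindingFailed as exc:
        print(exc)   # Binding failed for port 1bb7a991-..., please check neutron logs ...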
[ 533.515826] env[63345]: DEBUG nova.compute.manager [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 533.516964] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b016433-7604-4855-8625-1d1c0cffc019 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.588140] env[63345]: DEBUG nova.compute.utils [None req-a097932b-60f9-4384-b1fe-e2686fbffa04 tempest-ServersAdminNegativeTestJSON-1806261786 tempest-ServersAdminNegativeTestJSON-1806261786-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 533.592287] env[63345]: DEBUG nova.compute.manager [None req-a097932b-60f9-4384-b1fe-e2686fbffa04 tempest-ServersAdminNegativeTestJSON-1806261786 tempest-ServersAdminNegativeTestJSON-1806261786-project-member] [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] Allocating IP information in the background. {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 533.592287] env[63345]: DEBUG nova.network.neutron [None req-a097932b-60f9-4384-b1fe-e2686fbffa04 tempest-ServersAdminNegativeTestJSON-1806261786 tempest-ServersAdminNegativeTestJSON-1806261786-project-member] [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 533.635688] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d45e143f-5f98-4d14-bf6e-59c328ff4693 tempest-ServersWithSpecificFlavorTestJSON-889674138 tempest-ServersWithSpecificFlavorTestJSON-889674138-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 533.870076] env[63345]: DEBUG nova.policy [None req-a097932b-60f9-4384-b1fe-e2686fbffa04 tempest-ServersAdminNegativeTestJSON-1806261786 tempest-ServersAdminNegativeTestJSON-1806261786-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '58ded2b32a824173aff736275c32392b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd00846b9ef0b4c94bcad73ed03ef9178', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 534.045772] env[63345]: DEBUG oslo_concurrency.lockutils [None req-55f4b490-cb76-4231-a948-849d24288fed tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Acquiring lock "6e8ef6b9-4684-4685-949a-2e2868aa3fb7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 534.051206] env[63345]: DEBUG oslo_concurrency.lockutils [None req-55f4b490-cb76-4231-a948-849d24288fed tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] 
Lock "6e8ef6b9-4684-4685-949a-2e2868aa3fb7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.004s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 534.053840] env[63345]: INFO nova.compute.manager [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Took 16.77 seconds to build instance. [ 534.092251] env[63345]: DEBUG nova.compute.manager [None req-a097932b-60f9-4384-b1fe-e2686fbffa04 tempest-ServersAdminNegativeTestJSON-1806261786 tempest-ServersAdminNegativeTestJSON-1806261786-project-member] [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] Start building block device mappings for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 534.285500] env[63345]: INFO nova.scheduler.client.report [None req-358d58e2-d3f0-40f3-a40e-86b8333d3d19 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Deleted allocations for instance a6858a79-06b8-4110-9da4-e0e2a4a4e830 [ 534.367873] env[63345]: DEBUG oslo_concurrency.lockutils [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Acquiring lock "1f595aef-799f-4ca4-be91-e95ef056926c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 534.368167] env[63345]: DEBUG oslo_concurrency.lockutils [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Lock "1f595aef-799f-4ca4-be91-e95ef056926c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 534.461558] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99101989-9323-4269-a883-fe3e16e12b60 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.469574] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9997bd3-4869-4961-a63a-b3731429391c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.502654] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b59169fb-e111-4ff3-904c-027069482e23 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.512180] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4025fc4d-d7ff-40fb-9ba0-22bf7d0a4cbc {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.527758] env[63345]: DEBUG nova.compute.provider_tree [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 534.554643] env[63345]: DEBUG oslo_concurrency.lockutils [None req-21a28cdb-3c62-46ca-b3e7-e82e5d924ac4 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Lock "5ef55aca-0714-4b34-85f2-b6d53f97c2d0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.280s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 534.612040] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8a6c5bc0-caa6-4dbe-a700-0852231dd95b tempest-ServerExternalEventsTest-203609284 tempest-ServerExternalEventsTest-203609284-project-member] Acquiring lock "77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 534.612040] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8a6c5bc0-caa6-4dbe-a700-0852231dd95b tempest-ServerExternalEventsTest-203609284 tempest-ServerExternalEventsTest-203609284-project-member] Lock "77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 534.803365] env[63345]: DEBUG oslo_concurrency.lockutils [None req-358d58e2-d3f0-40f3-a40e-86b8333d3d19 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Lock "a6858a79-06b8-4110-9da4-e0e2a4a4e830" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.371s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 534.805375] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Lock "a6858a79-06b8-4110-9da4-e0e2a4a4e830" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 17.729s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 534.806664] env[63345]: INFO nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: a6858a79-06b8-4110-9da4-e0e2a4a4e830] During sync_power_state the instance has a pending task (spawning). Skip. 
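Each report cycle in this section logs the same inventory for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57, hence the repeated "Inventory has not changed" messages. As a rough worked example, assuming Placement's usual capacity rule of (total - reserved) * allocation_ratio, the schedulable capacity implied by that inventory is:

    # Sketch: capacity implied by the inventory dict logged for the provider,
    # assuming capacity = (total - reserved) * allocation_ratio.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0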
[ 534.806664] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Lock "a6858a79-06b8-4110-9da4-e0e2a4a4e830" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 535.031314] env[63345]: DEBUG nova.scheduler.client.report [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 535.061019] env[63345]: DEBUG nova.compute.manager [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 175ede99-48e4-43dc-b563-140f42244c97] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 535.106675] env[63345]: DEBUG nova.compute.manager [None req-a097932b-60f9-4384-b1fe-e2686fbffa04 tempest-ServersAdminNegativeTestJSON-1806261786 tempest-ServersAdminNegativeTestJSON-1806261786-project-member] [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] Start spawning the instance on the hypervisor. 
{{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 535.153472] env[63345]: DEBUG nova.virt.hardware [None req-a097932b-60f9-4384-b1fe-e2686fbffa04 tempest-ServersAdminNegativeTestJSON-1806261786 tempest-ServersAdminNegativeTestJSON-1806261786-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 535.153791] env[63345]: DEBUG nova.virt.hardware [None req-a097932b-60f9-4384-b1fe-e2686fbffa04 tempest-ServersAdminNegativeTestJSON-1806261786 tempest-ServersAdminNegativeTestJSON-1806261786-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 535.153995] env[63345]: DEBUG nova.virt.hardware [None req-a097932b-60f9-4384-b1fe-e2686fbffa04 tempest-ServersAdminNegativeTestJSON-1806261786 tempest-ServersAdminNegativeTestJSON-1806261786-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 535.159120] env[63345]: DEBUG nova.virt.hardware [None req-a097932b-60f9-4384-b1fe-e2686fbffa04 tempest-ServersAdminNegativeTestJSON-1806261786 tempest-ServersAdminNegativeTestJSON-1806261786-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 535.159120] env[63345]: DEBUG nova.virt.hardware [None req-a097932b-60f9-4384-b1fe-e2686fbffa04 tempest-ServersAdminNegativeTestJSON-1806261786 tempest-ServersAdminNegativeTestJSON-1806261786-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 535.159120] env[63345]: DEBUG nova.virt.hardware [None req-a097932b-60f9-4384-b1fe-e2686fbffa04 tempest-ServersAdminNegativeTestJSON-1806261786 tempest-ServersAdminNegativeTestJSON-1806261786-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 535.159120] env[63345]: DEBUG nova.virt.hardware [None req-a097932b-60f9-4384-b1fe-e2686fbffa04 tempest-ServersAdminNegativeTestJSON-1806261786 tempest-ServersAdminNegativeTestJSON-1806261786-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 535.159120] env[63345]: DEBUG nova.virt.hardware [None req-a097932b-60f9-4384-b1fe-e2686fbffa04 tempest-ServersAdminNegativeTestJSON-1806261786 tempest-ServersAdminNegativeTestJSON-1806261786-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 535.159473] env[63345]: DEBUG nova.virt.hardware [None req-a097932b-60f9-4384-b1fe-e2686fbffa04 tempest-ServersAdminNegativeTestJSON-1806261786 tempest-ServersAdminNegativeTestJSON-1806261786-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 535.159473] env[63345]: DEBUG nova.virt.hardware [None req-a097932b-60f9-4384-b1fe-e2686fbffa04 tempest-ServersAdminNegativeTestJSON-1806261786 tempest-ServersAdminNegativeTestJSON-1806261786-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 535.159473] env[63345]: DEBUG nova.virt.hardware [None req-a097932b-60f9-4384-b1fe-e2686fbffa04 tempest-ServersAdminNegativeTestJSON-1806261786 tempest-ServersAdminNegativeTestJSON-1806261786-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 535.159473] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-560e6b6c-dbb2-4777-9fdd-15175cae45a7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.167698] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6dfb137-227a-4cc6-af6f-a08e47aa4106 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.309671] env[63345]: DEBUG nova.compute.manager [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 535.544763] env[63345]: DEBUG oslo_concurrency.lockutils [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.461s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 535.544763] env[63345]: DEBUG nova.compute.manager [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] Start building networks asynchronously for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 535.545597] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8eb6a8e8-18a6-4bca-bb00-3bbb78f015ed tempest-FloatingIPsAssociationTestJSON-967064990 tempest-FloatingIPsAssociationTestJSON-967064990-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.259s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 535.548505] env[63345]: INFO nova.compute.claims [None req-8eb6a8e8-18a6-4bca-bb00-3bbb78f015ed tempest-FloatingIPsAssociationTestJSON-967064990 tempest-FloatingIPsAssociationTestJSON-967064990-project-member] [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 535.603093] env[63345]: DEBUG oslo_concurrency.lockutils [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 535.704735] env[63345]: DEBUG oslo_concurrency.lockutils [None req-994a51d0-00b8-426a-996a-32235561cbcb tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Acquiring lock "1a54db9b-0482-4038-a505-46447f0c33ef" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 535.704964] env[63345]: DEBUG oslo_concurrency.lockutils [None req-994a51d0-00b8-426a-996a-32235561cbcb tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Lock "1a54db9b-0482-4038-a505-46447f0c33ef" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 535.839273] env[63345]: DEBUG oslo_concurrency.lockutils [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 535.971519] env[63345]: DEBUG nova.network.neutron [None req-a097932b-60f9-4384-b1fe-e2686fbffa04 tempest-ServersAdminNegativeTestJSON-1806261786 tempest-ServersAdminNegativeTestJSON-1806261786-project-member] [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] Successfully created port: ca7e4f78-240a-446f-ad69-0e78b3d99ee7 {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 536.057470] env[63345]: DEBUG nova.compute.utils [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 536.066177] env[63345]: DEBUG nova.compute.manager [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] 
[instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] Allocating IP information in the background. {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 536.066739] env[63345]: DEBUG nova.network.neutron [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 536.086671] env[63345]: INFO nova.compute.manager [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Rebuilding instance [ 536.164687] env[63345]: DEBUG nova.compute.manager [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 536.165755] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33f6c7b6-09c7-43b2-b941-77f3a7788ea1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.303555] env[63345]: DEBUG nova.policy [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '565bd492468042fdaa8899d9cc7578df', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7044339f9801407381379b0fceae7d8d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 536.568969] env[63345]: DEBUG nova.compute.manager [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] Start building block device mappings for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 536.895302] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa001971-d0bc-46a8-be37-7fd5b704af9b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.908023] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db617f18-36ba-4dfc-9e21-be1362a8bf29 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.945829] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-082eab6f-2200-48aa-b7ce-7ed1393d5290 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.954824] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-655831f6-71e4-427a-bcb0-b93cd5a5922e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.973193] env[63345]: DEBUG nova.compute.provider_tree [None req-8eb6a8e8-18a6-4bca-bb00-3bbb78f015ed tempest-FloatingIPsAssociationTestJSON-967064990 tempest-FloatingIPsAssociationTestJSON-967064990-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 537.188216] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 537.188216] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f5ac1ae0-18fd-4df1-b045-68b5391965f6 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.196861] env[63345]: DEBUG oslo_vmware.api [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Waiting for the task: (returnval){ [ 537.196861] env[63345]: value = "task-1016630" [ 537.196861] env[63345]: _type = "Task" [ 537.196861] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 537.210767] env[63345]: DEBUG oslo_vmware.api [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Task: {'id': task-1016630, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 537.477555] env[63345]: DEBUG nova.scheduler.client.report [None req-8eb6a8e8-18a6-4bca-bb00-3bbb78f015ed tempest-FloatingIPsAssociationTestJSON-967064990 tempest-FloatingIPsAssociationTestJSON-967064990-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 537.534641] env[63345]: DEBUG nova.network.neutron [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] Successfully created port: fc3acfbf-5f06-437b-a030-e8eec8a877b8 {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 537.581237] env[63345]: DEBUG nova.compute.manager [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] Start spawning the instance on the hypervisor. {{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 537.614240] env[63345]: DEBUG nova.virt.hardware [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 537.614240] env[63345]: DEBUG nova.virt.hardware [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 537.614408] env[63345]: DEBUG nova.virt.hardware [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 537.614464] env[63345]: DEBUG nova.virt.hardware [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] Flavor 
pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 537.614606] env[63345]: DEBUG nova.virt.hardware [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 537.614750] env[63345]: DEBUG nova.virt.hardware [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 537.614953] env[63345]: DEBUG nova.virt.hardware [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 537.615250] env[63345]: DEBUG nova.virt.hardware [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 537.615702] env[63345]: DEBUG nova.virt.hardware [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 537.615702] env[63345]: DEBUG nova.virt.hardware [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 537.615870] env[63345]: DEBUG nova.virt.hardware [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 537.616637] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5ef0520-4c7a-4cb4-8349-fc86dcda6151 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.629445] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f70a7fc5-5e35-487a-8cd7-860efeef79c8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.707472] env[63345]: DEBUG oslo_vmware.api [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Task: {'id': task-1016630, 'name': PowerOffVM_Task, 'duration_secs': 0.140847} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 537.707969] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 537.708553] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 537.709426] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7c06c51-1443-40e0-abdd-f3e5b1307f26 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.717748] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 537.720719] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-db035225-5f44-4279-899e-03e30c66582f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.752249] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 537.752614] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 537.752700] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Deleting the datastore file [datastore2] 5ef55aca-0714-4b34-85f2-b6d53f97c2d0 {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 537.752963] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-031ef18a-625c-4143-a83f-c4ba46308e1b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.764183] env[63345]: DEBUG oslo_vmware.api [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Waiting for the task: (returnval){ [ 537.764183] env[63345]: value = "task-1016632" [ 537.764183] env[63345]: _type = "Task" [ 537.764183] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 537.775861] env[63345]: DEBUG oslo_vmware.api [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Task: {'id': task-1016632, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 537.992838] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8eb6a8e8-18a6-4bca-bb00-3bbb78f015ed tempest-FloatingIPsAssociationTestJSON-967064990 tempest-FloatingIPsAssociationTestJSON-967064990-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.448s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 537.993417] env[63345]: DEBUG nova.compute.manager [None req-8eb6a8e8-18a6-4bca-bb00-3bbb78f015ed tempest-FloatingIPsAssociationTestJSON-967064990 tempest-FloatingIPsAssociationTestJSON-967064990-project-member] [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] Start building networks asynchronously for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 537.996430] env[63345]: DEBUG oslo_concurrency.lockutils [None req-04fcf833-0c89-4319-90a1-d11e5e77b068 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.395s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 538.275738] env[63345]: DEBUG oslo_vmware.api [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Task: {'id': task-1016632, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.11516} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 538.276180] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 538.276180] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 538.276327] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 538.503581] env[63345]: DEBUG nova.compute.utils [None req-8eb6a8e8-18a6-4bca-bb00-3bbb78f015ed tempest-FloatingIPsAssociationTestJSON-967064990 tempest-FloatingIPsAssociationTestJSON-967064990-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 538.509425] env[63345]: DEBUG nova.compute.manager [None req-8eb6a8e8-18a6-4bca-bb00-3bbb78f015ed tempest-FloatingIPsAssociationTestJSON-967064990 tempest-FloatingIPsAssociationTestJSON-967064990-project-member] [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] Allocating IP information in the background. {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 538.509425] env[63345]: DEBUG nova.network.neutron [None req-8eb6a8e8-18a6-4bca-bb00-3bbb78f015ed tempest-FloatingIPsAssociationTestJSON-967064990 tempest-FloatingIPsAssociationTestJSON-967064990-project-member] [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 538.808821] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45469ad9-f033-418a-b8b0-0f28829e98a9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.818086] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2963ded-9e94-4846-9a20-36428ed495ba {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.854246] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7acc3008-0f75-44f3-9b0f-f5e268f02dfc {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.864037] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cb7bf83-53c2-4d56-bce0-7c4df9240a06 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.879512] env[63345]: DEBUG nova.compute.provider_tree [None req-04fcf833-0c89-4319-90a1-d11e5e77b068 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] Inventory has not changed in 
ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 538.890213] env[63345]: DEBUG nova.policy [None req-8eb6a8e8-18a6-4bca-bb00-3bbb78f015ed tempest-FloatingIPsAssociationTestJSON-967064990 tempest-FloatingIPsAssociationTestJSON-967064990-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '93a60dc04d9c4e0e892a41c9699b6be0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '40bd4eb2e52c428cac55f1ed81872157', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 539.012416] env[63345]: DEBUG nova.compute.manager [None req-8eb6a8e8-18a6-4bca-bb00-3bbb78f015ed tempest-FloatingIPsAssociationTestJSON-967064990 tempest-FloatingIPsAssociationTestJSON-967064990-project-member] [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] Start building block device mappings for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 539.324980] env[63345]: DEBUG nova.virt.hardware [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 539.325317] env[63345]: DEBUG nova.virt.hardware [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 539.325537] env[63345]: DEBUG nova.virt.hardware [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 539.325893] env[63345]: DEBUG nova.virt.hardware [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 539.325975] env[63345]: DEBUG nova.virt.hardware [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 539.326251] env[63345]: DEBUG nova.virt.hardware [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 539.326358] env[63345]: DEBUG nova.virt.hardware [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 539.326590] env[63345]: DEBUG nova.virt.hardware [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 539.326753] env[63345]: DEBUG nova.virt.hardware [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 539.327427] env[63345]: DEBUG nova.virt.hardware [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 539.327427] env[63345]: DEBUG nova.virt.hardware [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 539.328299] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29e98445-31c7-4267-a6ee-adcc54a4f700 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.336516] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-696f066c-cacb-4bda-ad2a-b3c58cee48c0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.350864] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Instance VIF info [] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 539.356919] env[63345]: DEBUG oslo.service.loopingcall [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 539.357248] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 539.357473] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-23cc19e2-1d3f-4726-b5d7-1fb9ef202f2a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.383188] env[63345]: DEBUG nova.scheduler.client.report [None req-04fcf833-0c89-4319-90a1-d11e5e77b068 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 539.386335] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 539.386335] env[63345]: value = "task-1016633" [ 539.386335] env[63345]: _type = "Task" [ 539.386335] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 539.401106] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016633, 'name': CreateVM_Task} progress is 6%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 539.898864] env[63345]: DEBUG oslo_concurrency.lockutils [None req-04fcf833-0c89-4319-90a1-d11e5e77b068 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.902s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 539.899536] env[63345]: ERROR nova.compute.manager [None req-04fcf833-0c89-4319-90a1-d11e5e77b068 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 80ef48d2-0611-4708-9b7a-7b609048be7e, please check neutron logs for more information. 
[ 539.899536] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] Traceback (most recent call last): [ 539.899536] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 539.899536] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] self.driver.spawn(context, instance, image_meta, [ 539.899536] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 539.899536] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 539.899536] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 539.899536] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] vm_ref = self.build_virtual_machine(instance, [ 539.899536] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 539.899536] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] vif_infos = vmwarevif.get_vif_info(self._session, [ 539.899536] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 539.899903] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] for vif in network_info: [ 539.899903] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 539.899903] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] return self._sync_wrapper(fn, *args, **kwargs) [ 539.899903] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 539.899903] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] self.wait() [ 539.899903] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 539.899903] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] self[:] = self._gt.wait() [ 539.899903] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 539.899903] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] return self._exit_event.wait() [ 539.899903] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 539.899903] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] result = hub.switch() [ 539.899903] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
539.899903] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] return self.greenlet.switch() [ 539.900275] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 539.900275] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] result = function(*args, **kwargs) [ 539.900275] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 539.900275] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] return func(*args, **kwargs) [ 539.900275] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 539.900275] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] raise e [ 539.900275] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 539.900275] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] nwinfo = self.network_api.allocate_for_instance( [ 539.900275] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 539.900275] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] created_port_ids = self._update_ports_for_instance( [ 539.900275] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 539.900275] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] with excutils.save_and_reraise_exception(): [ 539.900275] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 539.900668] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] self.force_reraise() [ 539.900668] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 539.900668] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] raise self.value [ 539.900668] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 539.900668] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] updated_port = self._update_port( [ 539.900668] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 539.900668] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] _ensure_no_port_binding_failure(port) [ 539.900668] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 539.900668] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] raise exception.PortBindingFailed(port_id=port['id']) [ 539.900668] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] nova.exception.PortBindingFailed: Binding failed for port 80ef48d2-0611-4708-9b7a-7b609048be7e, please check neutron logs for more information. [ 539.900668] env[63345]: ERROR nova.compute.manager [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] [ 539.900981] env[63345]: DEBUG nova.compute.utils [None req-04fcf833-0c89-4319-90a1-d11e5e77b068 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] Binding failed for port 80ef48d2-0611-4708-9b7a-7b609048be7e, please check neutron logs for more information. {{(pid=63345) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 539.905211] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016633, 'name': CreateVM_Task, 'duration_secs': 0.321766} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 539.906164] env[63345]: DEBUG nova.compute.manager [None req-04fcf833-0c89-4319-90a1-d11e5e77b068 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] Build of instance aa21e116-3bf1-4574-8d4f-d0a1af692e8b was re-scheduled: Binding failed for port 80ef48d2-0611-4708-9b7a-7b609048be7e, please check neutron logs for more information. {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 539.906615] env[63345]: DEBUG nova.compute.manager [None req-04fcf833-0c89-4319-90a1-d11e5e77b068 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] Unplugging VIFs for instance {{(pid=63345) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 539.906846] env[63345]: DEBUG oslo_concurrency.lockutils [None req-04fcf833-0c89-4319-90a1-d11e5e77b068 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] Acquiring lock "refresh_cache-aa21e116-3bf1-4574-8d4f-d0a1af692e8b" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 539.907528] env[63345]: DEBUG oslo_concurrency.lockutils [None req-04fcf833-0c89-4319-90a1-d11e5e77b068 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] Acquired lock "refresh_cache-aa21e116-3bf1-4574-8d4f-d0a1af692e8b" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 539.907528] env[63345]: DEBUG nova.network.neutron [None req-04fcf833-0c89-4319-90a1-d11e5e77b068 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 539.908256] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 15.803s {{(pid=63345) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 539.908448] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 539.908600] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63345) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 539.908872] env[63345]: DEBUG oslo_concurrency.lockutils [None req-092e8cc7-bca1-43ff-be18-4b9d654c5cad tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.433s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 539.914880] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 539.916805] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dbe855c-dc69-455f-a7b1-8d1f4bf9092b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.921039] env[63345]: DEBUG oslo_concurrency.lockutils [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 539.922132] env[63345]: DEBUG oslo_concurrency.lockutils [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 539.922960] env[63345]: DEBUG oslo_concurrency.lockutils [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 539.925026] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-30c10aca-a88f-40e1-88e5-3b7e6217b747 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.938154] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83d02b4b-082d-47ee-9d10-33981519b64e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.952200] env[63345]: DEBUG oslo_vmware.api [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 
tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Waiting for the task: (returnval){ [ 539.952200] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52e76f24-eeb0-b999-469c-e56c45cfeabd" [ 539.952200] env[63345]: _type = "Task" [ 539.952200] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 539.967453] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2bd5816-49e4-4e31-99b1-ee678d1833b9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.974052] env[63345]: DEBUG oslo_vmware.api [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52e76f24-eeb0-b999-469c-e56c45cfeabd, 'name': SearchDatastore_Task, 'duration_secs': 0.016396} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 539.974630] env[63345]: DEBUG oslo_concurrency.lockutils [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 539.974864] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 539.975119] env[63345]: DEBUG oslo_concurrency.lockutils [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 539.975270] env[63345]: DEBUG oslo_concurrency.lockutils [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 539.975472] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 539.977941] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2e511d48-38e8-4b89-9784-7ff35a4204cd {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.981425] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-0f843506-3da2-4dca-9535-7aff9cb37f5a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.986211] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 539.986388] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 540.017343] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5152e384-9590-4ec6-bb93-0c827e58cac2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.019513] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181377MB free_disk=187GB free_vcpus=48 pci_devices=None {{(pid=63345) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 540.019667] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 540.023522] env[63345]: DEBUG oslo_vmware.api [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Waiting for the task: (returnval){ [ 540.023522] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52b957a7-38ce-6a91-7572-055df9a0348a" [ 540.023522] env[63345]: _type = "Task" [ 540.023522] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 540.041214] env[63345]: DEBUG nova.compute.manager [None req-8eb6a8e8-18a6-4bca-bb00-3bbb78f015ed tempest-FloatingIPsAssociationTestJSON-967064990 tempest-FloatingIPsAssociationTestJSON-967064990-project-member] [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] Start spawning the instance on the hypervisor. {{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 540.059016] env[63345]: DEBUG oslo_vmware.api [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52b957a7-38ce-6a91-7572-055df9a0348a, 'name': SearchDatastore_Task, 'duration_secs': 0.010108} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 540.059016] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9b0cd023-d144-4775-8b62-c94acfe95155 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.068573] env[63345]: DEBUG oslo_vmware.api [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Waiting for the task: (returnval){ [ 540.068573] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]5225033b-f74d-1905-ee46-4bd80cdd6583" [ 540.068573] env[63345]: _type = "Task" [ 540.068573] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 540.080987] env[63345]: DEBUG oslo_vmware.api [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5225033b-f74d-1905-ee46-4bd80cdd6583, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 540.091358] env[63345]: DEBUG nova.virt.hardware [None req-8eb6a8e8-18a6-4bca-bb00-3bbb78f015ed tempest-FloatingIPsAssociationTestJSON-967064990 tempest-FloatingIPsAssociationTestJSON-967064990-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 540.091903] env[63345]: DEBUG nova.virt.hardware [None req-8eb6a8e8-18a6-4bca-bb00-3bbb78f015ed tempest-FloatingIPsAssociationTestJSON-967064990 tempest-FloatingIPsAssociationTestJSON-967064990-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 540.092719] env[63345]: DEBUG nova.virt.hardware [None req-8eb6a8e8-18a6-4bca-bb00-3bbb78f015ed tempest-FloatingIPsAssociationTestJSON-967064990 tempest-FloatingIPsAssociationTestJSON-967064990-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 540.093031] env[63345]: DEBUG nova.virt.hardware [None req-8eb6a8e8-18a6-4bca-bb00-3bbb78f015ed tempest-FloatingIPsAssociationTestJSON-967064990 tempest-FloatingIPsAssociationTestJSON-967064990-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 540.093293] env[63345]: DEBUG nova.virt.hardware [None req-8eb6a8e8-18a6-4bca-bb00-3bbb78f015ed tempest-FloatingIPsAssociationTestJSON-967064990 
tempest-FloatingIPsAssociationTestJSON-967064990-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 540.093574] env[63345]: DEBUG nova.virt.hardware [None req-8eb6a8e8-18a6-4bca-bb00-3bbb78f015ed tempest-FloatingIPsAssociationTestJSON-967064990 tempest-FloatingIPsAssociationTestJSON-967064990-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 540.093981] env[63345]: DEBUG nova.virt.hardware [None req-8eb6a8e8-18a6-4bca-bb00-3bbb78f015ed tempest-FloatingIPsAssociationTestJSON-967064990 tempest-FloatingIPsAssociationTestJSON-967064990-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 540.094263] env[63345]: DEBUG nova.virt.hardware [None req-8eb6a8e8-18a6-4bca-bb00-3bbb78f015ed tempest-FloatingIPsAssociationTestJSON-967064990 tempest-FloatingIPsAssociationTestJSON-967064990-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 540.094532] env[63345]: DEBUG nova.virt.hardware [None req-8eb6a8e8-18a6-4bca-bb00-3bbb78f015ed tempest-FloatingIPsAssociationTestJSON-967064990 tempest-FloatingIPsAssociationTestJSON-967064990-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 540.094809] env[63345]: DEBUG nova.virt.hardware [None req-8eb6a8e8-18a6-4bca-bb00-3bbb78f015ed tempest-FloatingIPsAssociationTestJSON-967064990 tempest-FloatingIPsAssociationTestJSON-967064990-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 540.095106] env[63345]: DEBUG nova.virt.hardware [None req-8eb6a8e8-18a6-4bca-bb00-3bbb78f015ed tempest-FloatingIPsAssociationTestJSON-967064990 tempest-FloatingIPsAssociationTestJSON-967064990-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 540.096542] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a82a1ff4-fcf6-469c-b381-3987399bee09 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.107221] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-765f1363-c8b1-4790-8360-5300ef5676c6 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.456481] env[63345]: DEBUG nova.network.neutron [None req-04fcf833-0c89-4319-90a1-d11e5e77b068 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 540.581974] env[63345]: DEBUG oslo_vmware.api [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5225033b-f74d-1905-ee46-4bd80cdd6583, 'name': SearchDatastore_Task, 'duration_secs': 0.008737} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 540.582366] env[63345]: DEBUG oslo_concurrency.lockutils [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 540.582644] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 5ef55aca-0714-4b34-85f2-b6d53f97c2d0/5ef55aca-0714-4b34-85f2-b6d53f97c2d0.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 540.582909] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c66bd1d2-8dc7-4617-b568-33c76f6c4495 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.596883] env[63345]: DEBUG oslo_vmware.api [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Waiting for the task: (returnval){ [ 540.596883] env[63345]: value = "task-1016634" [ 540.596883] env[63345]: _type = "Task" [ 540.596883] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 540.608803] env[63345]: DEBUG oslo_vmware.api [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Task: {'id': task-1016634, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 540.624219] env[63345]: DEBUG nova.network.neutron [None req-04fcf833-0c89-4319-90a1-d11e5e77b068 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 540.770054] env[63345]: DEBUG nova.network.neutron [None req-8eb6a8e8-18a6-4bca-bb00-3bbb78f015ed tempest-FloatingIPsAssociationTestJSON-967064990 tempest-FloatingIPsAssociationTestJSON-967064990-project-member] [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] Successfully created port: b0c5ad0c-5682-4e54-b4c7-916cd8074721 {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 540.778881] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-def7d1a0-7a52-409b-bb91-c6242823cc23 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.790596] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fa855e3-abd3-4fee-aa23-d333d01e286f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.829076] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6b4c450-0356-4e26-b3b4-f5e2b56387a1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.841137] env[63345]: DEBUG oslo_concurrency.lockutils [None req-230142a3-84f1-43a5-9eb9-4bad4e3f22a1 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Acquiring lock "e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 540.841137] env[63345]: DEBUG oslo_concurrency.lockutils [None req-230142a3-84f1-43a5-9eb9-4bad4e3f22a1 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Lock "e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 540.842486] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9d1a7b9-14b2-410c-ba78-4cd384fcacdf {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.857378] env[63345]: DEBUG nova.compute.provider_tree [None req-092e8cc7-bca1-43ff-be18-4b9d654c5cad tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 540.983219] env[63345]: DEBUG nova.compute.manager [None req-af2868ce-94c2-40d3-bc1e-b9a57b867a64 tempest-ServerDiagnosticsV248Test-1711407586 tempest-ServerDiagnosticsV248Test-1711407586-project-admin] [instance: 51d6db80-9d1f-4e38-a564-f587474f6294] Checking state 
{{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 540.984516] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c8fe9ce-9c3f-4b79-9abe-82cc0b9c6a55 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.994689] env[63345]: INFO nova.compute.manager [None req-af2868ce-94c2-40d3-bc1e-b9a57b867a64 tempest-ServerDiagnosticsV248Test-1711407586 tempest-ServerDiagnosticsV248Test-1711407586-project-admin] [instance: 51d6db80-9d1f-4e38-a564-f587474f6294] Retrieving diagnostics [ 540.995738] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afdfadec-a146-44d0-90d7-3f966ecf441f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.079038] env[63345]: ERROR nova.compute.manager [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port fc3acfbf-5f06-437b-a030-e8eec8a877b8, please check neutron logs for more information. [ 541.079038] env[63345]: ERROR nova.compute.manager Traceback (most recent call last): [ 541.079038] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 541.079038] env[63345]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 541.079038] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 541.079038] env[63345]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 541.079038] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 541.079038] env[63345]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 541.079038] env[63345]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 541.079038] env[63345]: ERROR nova.compute.manager self.force_reraise() [ 541.079038] env[63345]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 541.079038] env[63345]: ERROR nova.compute.manager raise self.value [ 541.079038] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 541.079038] env[63345]: ERROR nova.compute.manager updated_port = self._update_port( [ 541.079038] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 541.079038] env[63345]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 541.079584] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 541.079584] env[63345]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 541.079584] env[63345]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port fc3acfbf-5f06-437b-a030-e8eec8a877b8, please check neutron logs for more information. 
[ 541.079584] env[63345]: ERROR nova.compute.manager [ 541.079584] env[63345]: Traceback (most recent call last): [ 541.079584] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 541.079584] env[63345]: listener.cb(fileno) [ 541.079584] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 541.079584] env[63345]: result = function(*args, **kwargs) [ 541.079584] env[63345]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 541.079584] env[63345]: return func(*args, **kwargs) [ 541.079584] env[63345]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 541.079584] env[63345]: raise e [ 541.079584] env[63345]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 541.079584] env[63345]: nwinfo = self.network_api.allocate_for_instance( [ 541.079584] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 541.079584] env[63345]: created_port_ids = self._update_ports_for_instance( [ 541.079584] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 541.079584] env[63345]: with excutils.save_and_reraise_exception(): [ 541.079584] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 541.079584] env[63345]: self.force_reraise() [ 541.079584] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 541.079584] env[63345]: raise self.value [ 541.079584] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 541.079584] env[63345]: updated_port = self._update_port( [ 541.079584] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 541.079584] env[63345]: _ensure_no_port_binding_failure(port) [ 541.079584] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 541.079584] env[63345]: raise exception.PortBindingFailed(port_id=port['id']) [ 541.080476] env[63345]: nova.exception.PortBindingFailed: Binding failed for port fc3acfbf-5f06-437b-a030-e8eec8a877b8, please check neutron logs for more information. [ 541.080476] env[63345]: Removing descriptor: 16 [ 541.080476] env[63345]: ERROR nova.compute.manager [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port fc3acfbf-5f06-437b-a030-e8eec8a877b8, please check neutron logs for more information. 
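
The traceback above bottoms out in nova/network/neutron.py, where _ensure_no_port_binding_failure(port) raises PortBindingFailed once Neutron has returned the updated port. The sketch below is illustrative only, not the Nova source: it assumes, as the frames suggest, that the trigger is the port's binding:vif_type coming back as "binding_failed"; the stand-in exception class and the sample port dict are hypothetical.

    # Minimal sketch of the binding-failure check, assuming the trigger is
    # binding:vif_type == "binding_failed". The real check lives in
    # nova/network/neutron.py and the exception in nova.exception.
    class PortBindingFailed(Exception):
        """Stand-in for nova.exception.PortBindingFailed."""
        def __init__(self, port_id):
            super().__init__(
                f"Binding failed for port {port_id}, please check neutron "
                "logs for more information.")

    VIF_TYPE_BINDING_FAILED = "binding_failed"

    def ensure_no_port_binding_failure(port: dict) -> None:
        # Neutron reports binding_failed when no mechanism driver could bind
        # the port, e.g. because the target host has no matching agent.
        if port.get("binding:vif_type") == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port["id"])

    try:
        ensure_no_port_binding_failure({
            "id": "fc3acfbf-5f06-437b-a030-e8eec8a877b8",
            "binding:vif_type": VIF_TYPE_BINDING_FAILED,
        })
    except PortBindingFailed as exc:
        print(exc)

Because the raise happens inside the save_and_reraise_exception() block of _update_ports_for_instance, the exception is re-raised up through _allocate_network_async and the instance spawn, which is why the same "Binding failed for port fc3acfbf-…" message repeats across the surrounding records.
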
[ 541.080476] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] Traceback (most recent call last): [ 541.080476] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 541.080476] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] yield resources [ 541.080476] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 541.080476] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] self.driver.spawn(context, instance, image_meta, [ 541.080476] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 541.080476] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] self._vmops.spawn(context, instance, image_meta, injected_files, [ 541.080476] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 541.080476] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] vm_ref = self.build_virtual_machine(instance, [ 541.080827] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 541.080827] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] vif_infos = vmwarevif.get_vif_info(self._session, [ 541.080827] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 541.080827] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] for vif in network_info: [ 541.080827] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 541.080827] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] return self._sync_wrapper(fn, *args, **kwargs) [ 541.080827] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 541.080827] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] self.wait() [ 541.080827] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 541.080827] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] self[:] = self._gt.wait() [ 541.080827] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 541.080827] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] return self._exit_event.wait() [ 541.080827] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 541.081203] env[63345]: ERROR 
nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] result = hub.switch() [ 541.081203] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 541.081203] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] return self.greenlet.switch() [ 541.081203] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 541.081203] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] result = function(*args, **kwargs) [ 541.081203] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 541.081203] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] return func(*args, **kwargs) [ 541.081203] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 541.081203] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] raise e [ 541.081203] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 541.081203] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] nwinfo = self.network_api.allocate_for_instance( [ 541.081203] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 541.081203] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] created_port_ids = self._update_ports_for_instance( [ 541.081578] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 541.081578] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] with excutils.save_and_reraise_exception(): [ 541.081578] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 541.081578] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] self.force_reraise() [ 541.081578] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 541.081578] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] raise self.value [ 541.081578] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 541.081578] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] updated_port = self._update_port( [ 541.081578] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 541.081578] 
env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] _ensure_no_port_binding_failure(port) [ 541.081578] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 541.081578] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] raise exception.PortBindingFailed(port_id=port['id']) [ 541.081926] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] nova.exception.PortBindingFailed: Binding failed for port fc3acfbf-5f06-437b-a030-e8eec8a877b8, please check neutron logs for more information. [ 541.081926] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] [ 541.081926] env[63345]: INFO nova.compute.manager [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] Terminating instance [ 541.107693] env[63345]: DEBUG oslo_vmware.api [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Task: {'id': task-1016634, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.499452} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 541.107988] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 5ef55aca-0714-4b34-85f2-b6d53f97c2d0/5ef55aca-0714-4b34-85f2-b6d53f97c2d0.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 541.108510] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 541.108791] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-299abcd6-12de-4381-80fe-e1905c0f68d1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.116608] env[63345]: DEBUG oslo_vmware.api [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Waiting for the task: (returnval){ [ 541.116608] env[63345]: value = "task-1016635" [ 541.116608] env[63345]: _type = "Task" [ 541.116608] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 541.129984] env[63345]: DEBUG oslo_concurrency.lockutils [None req-04fcf833-0c89-4319-90a1-d11e5e77b068 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] Releasing lock "refresh_cache-aa21e116-3bf1-4574-8d4f-d0a1af692e8b" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 541.130102] env[63345]: DEBUG nova.compute.manager [None req-04fcf833-0c89-4319-90a1-d11e5e77b068 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=63345) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 541.130256] env[63345]: DEBUG nova.compute.manager [None req-04fcf833-0c89-4319-90a1-d11e5e77b068 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 541.131150] env[63345]: DEBUG nova.network.neutron [None req-04fcf833-0c89-4319-90a1-d11e5e77b068 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 541.132388] env[63345]: DEBUG oslo_vmware.api [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Task: {'id': task-1016635, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 541.163710] env[63345]: DEBUG nova.network.neutron [None req-04fcf833-0c89-4319-90a1-d11e5e77b068 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 541.361542] env[63345]: DEBUG nova.scheduler.client.report [None req-092e8cc7-bca1-43ff-be18-4b9d654c5cad tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 541.588733] env[63345]: DEBUG oslo_concurrency.lockutils [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] Acquiring lock "refresh_cache-1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 541.588733] env[63345]: DEBUG oslo_concurrency.lockutils [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] Acquired lock "refresh_cache-1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 541.588733] env[63345]: DEBUG nova.network.neutron [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 541.630724] env[63345]: DEBUG oslo_vmware.api [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Task: {'id': task-1016635, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063633} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 541.631408] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 541.632246] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-208d20fe-da0b-43d8-8832-3789ef7c4b5d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.656246] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Reconfiguring VM instance instance-00000007 to attach disk [datastore2] 5ef55aca-0714-4b34-85f2-b6d53f97c2d0/5ef55aca-0714-4b34-85f2-b6d53f97c2d0.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 541.656246] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e56e17cd-ebe0-404a-84db-1e6c68420ff8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.673219] env[63345]: DEBUG nova.network.neutron [None req-04fcf833-0c89-4319-90a1-d11e5e77b068 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 541.687637] env[63345]: DEBUG oslo_vmware.api [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Waiting for the task: (returnval){ [ 541.687637] env[63345]: value = "task-1016636" [ 541.687637] env[63345]: _type = "Task" [ 541.687637] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 541.700245] env[63345]: DEBUG oslo_vmware.api [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Task: {'id': task-1016636, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 541.866819] env[63345]: DEBUG oslo_concurrency.lockutils [None req-092e8cc7-bca1-43ff-be18-4b9d654c5cad tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.958s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 541.867596] env[63345]: ERROR nova.compute.manager [None req-092e8cc7-bca1-43ff-be18-4b9d654c5cad tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 7a36bb25-76ac-447e-a3ad-bb832183380e, please check neutron logs for more information. [ 541.867596] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] Traceback (most recent call last): [ 541.867596] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 541.867596] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] self.driver.spawn(context, instance, image_meta, [ 541.867596] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 541.867596] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] self._vmops.spawn(context, instance, image_meta, injected_files, [ 541.867596] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 541.867596] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] vm_ref = self.build_virtual_machine(instance, [ 541.867596] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 541.867596] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] vif_infos = vmwarevif.get_vif_info(self._session, [ 541.867596] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 541.867973] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] for vif in network_info: [ 541.867973] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 541.867973] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] return self._sync_wrapper(fn, *args, **kwargs) [ 541.867973] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 541.867973] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] self.wait() [ 541.867973] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 541.867973] env[63345]: ERROR 
nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] self[:] = self._gt.wait() [ 541.867973] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 541.867973] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] return self._exit_event.wait() [ 541.867973] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 541.867973] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] current.throw(*self._exc) [ 541.867973] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 541.867973] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] result = function(*args, **kwargs) [ 541.868335] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 541.868335] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] return func(*args, **kwargs) [ 541.868335] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 541.868335] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] raise e [ 541.868335] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 541.868335] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] nwinfo = self.network_api.allocate_for_instance( [ 541.868335] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 541.868335] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] created_port_ids = self._update_ports_for_instance( [ 541.868335] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 541.868335] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] with excutils.save_and_reraise_exception(): [ 541.868335] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 541.868335] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] self.force_reraise() [ 541.868335] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 541.868699] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] raise self.value [ 541.868699] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in 
_update_ports_for_instance [ 541.868699] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] updated_port = self._update_port( [ 541.868699] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 541.868699] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] _ensure_no_port_binding_failure(port) [ 541.868699] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 541.868699] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] raise exception.PortBindingFailed(port_id=port['id']) [ 541.868699] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] nova.exception.PortBindingFailed: Binding failed for port 7a36bb25-76ac-447e-a3ad-bb832183380e, please check neutron logs for more information. [ 541.868699] env[63345]: ERROR nova.compute.manager [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] [ 541.868699] env[63345]: DEBUG nova.compute.utils [None req-092e8cc7-bca1-43ff-be18-4b9d654c5cad tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] Binding failed for port 7a36bb25-76ac-447e-a3ad-bb832183380e, please check neutron logs for more information. {{(pid=63345) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 541.870223] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fed8ccca-f158-4dc4-bc3d-351314b383ad tempest-VolumesAssistedSnapshotsTest-639887845 tempest-VolumesAssistedSnapshotsTest-639887845-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.115s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 541.871972] env[63345]: INFO nova.compute.claims [None req-fed8ccca-f158-4dc4-bc3d-351314b383ad tempest-VolumesAssistedSnapshotsTest-639887845 tempest-VolumesAssistedSnapshotsTest-639887845-project-member] [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 541.879020] env[63345]: DEBUG nova.compute.manager [None req-092e8cc7-bca1-43ff-be18-4b9d654c5cad tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] Build of instance f4e897ce-2df5-40ae-99a8-11cac4902588 was re-scheduled: Binding failed for port 7a36bb25-76ac-447e-a3ad-bb832183380e, please check neutron logs for more information. 
{{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 541.879020] env[63345]: DEBUG nova.compute.manager [None req-092e8cc7-bca1-43ff-be18-4b9d654c5cad tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] Unplugging VIFs for instance {{(pid=63345) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 541.879020] env[63345]: DEBUG oslo_concurrency.lockutils [None req-092e8cc7-bca1-43ff-be18-4b9d654c5cad tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Acquiring lock "refresh_cache-f4e897ce-2df5-40ae-99a8-11cac4902588" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 541.879020] env[63345]: DEBUG oslo_concurrency.lockutils [None req-092e8cc7-bca1-43ff-be18-4b9d654c5cad tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Acquired lock "refresh_cache-f4e897ce-2df5-40ae-99a8-11cac4902588" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 541.879304] env[63345]: DEBUG nova.network.neutron [None req-092e8cc7-bca1-43ff-be18-4b9d654c5cad tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 542.127433] env[63345]: DEBUG nova.network.neutron [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 542.189879] env[63345]: INFO nova.compute.manager [None req-04fcf833-0c89-4319-90a1-d11e5e77b068 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] Took 1.06 seconds to deallocate network for instance. [ 542.207927] env[63345]: DEBUG oslo_vmware.api [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Task: {'id': task-1016636, 'name': ReconfigVM_Task, 'duration_secs': 0.278852} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 542.208226] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Reconfigured VM instance instance-00000007 to attach disk [datastore2] 5ef55aca-0714-4b34-85f2-b6d53f97c2d0/5ef55aca-0714-4b34-85f2-b6d53f97c2d0.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 542.209556] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-143e8ca9-32b1-4331-9e78-d7f327c5a3b8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.220106] env[63345]: DEBUG oslo_vmware.api [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Waiting for the task: (returnval){ [ 542.220106] env[63345]: value = "task-1016637" [ 542.220106] env[63345]: _type = "Task" [ 542.220106] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 542.235289] env[63345]: DEBUG oslo_vmware.api [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Task: {'id': task-1016637, 'name': Rename_Task} progress is 6%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 542.330532] env[63345]: ERROR nova.compute.manager [None req-a097932b-60f9-4384-b1fe-e2686fbffa04 tempest-ServersAdminNegativeTestJSON-1806261786 tempest-ServersAdminNegativeTestJSON-1806261786-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port ca7e4f78-240a-446f-ad69-0e78b3d99ee7, please check neutron logs for more information. 
[ 542.330532] env[63345]: ERROR nova.compute.manager Traceback (most recent call last): [ 542.330532] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 542.330532] env[63345]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 542.330532] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 542.330532] env[63345]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 542.330532] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 542.330532] env[63345]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 542.330532] env[63345]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 542.330532] env[63345]: ERROR nova.compute.manager self.force_reraise() [ 542.330532] env[63345]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 542.330532] env[63345]: ERROR nova.compute.manager raise self.value [ 542.330532] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 542.330532] env[63345]: ERROR nova.compute.manager updated_port = self._update_port( [ 542.330532] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 542.330532] env[63345]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 542.331396] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 542.331396] env[63345]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 542.331396] env[63345]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port ca7e4f78-240a-446f-ad69-0e78b3d99ee7, please check neutron logs for more information. 
[ 542.331396] env[63345]: ERROR nova.compute.manager [ 542.331396] env[63345]: Traceback (most recent call last): [ 542.331396] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 542.331396] env[63345]: listener.cb(fileno) [ 542.331396] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 542.331396] env[63345]: result = function(*args, **kwargs) [ 542.331396] env[63345]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 542.331396] env[63345]: return func(*args, **kwargs) [ 542.331396] env[63345]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 542.331396] env[63345]: raise e [ 542.331396] env[63345]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 542.331396] env[63345]: nwinfo = self.network_api.allocate_for_instance( [ 542.331396] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 542.331396] env[63345]: created_port_ids = self._update_ports_for_instance( [ 542.331396] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 542.331396] env[63345]: with excutils.save_and_reraise_exception(): [ 542.331396] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 542.331396] env[63345]: self.force_reraise() [ 542.331396] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 542.331396] env[63345]: raise self.value [ 542.331396] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 542.331396] env[63345]: updated_port = self._update_port( [ 542.331396] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 542.331396] env[63345]: _ensure_no_port_binding_failure(port) [ 542.331396] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 542.331396] env[63345]: raise exception.PortBindingFailed(port_id=port['id']) [ 542.332249] env[63345]: nova.exception.PortBindingFailed: Binding failed for port ca7e4f78-240a-446f-ad69-0e78b3d99ee7, please check neutron logs for more information. [ 542.332249] env[63345]: Removing descriptor: 15 [ 542.332249] env[63345]: ERROR nova.compute.manager [None req-a097932b-60f9-4384-b1fe-e2686fbffa04 tempest-ServersAdminNegativeTestJSON-1806261786 tempest-ServersAdminNegativeTestJSON-1806261786-project-member] [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port ca7e4f78-240a-446f-ad69-0e78b3d99ee7, please check neutron logs for more information. 
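
Both build failures funnel through the same excutils.save_and_reraise_exception() block in _update_ports_for_instance, which is why every traceback carries the force_reraise() and "raise self.value" frames. Below is a minimal sketch of that oslo.utils idiom, assuming only that oslo_utils is importable (it is in this venv); the update_port_or_cleanup wrapper and its arguments are hypothetical names for illustration, not Nova code.

    from oslo_utils import excutils

    def update_port_or_cleanup(update_port, cleanup):
        # Try the port update; on failure, run cleanup while preserving the
        # original exception. Leaving ctxt.reraise at its default (True) means
        # __exit__() calls force_reraise(), i.e. "raise self.value" in the log.
        try:
            return update_port()
        except Exception:
            with excutils.save_and_reraise_exception() as ctxt:
                cleanup()
                # Setting ctxt.reraise = False here would swallow the error;
                # Nova keeps it True so PortBindingFailed propagates to the
                # compute manager, where the build is aborted or re-scheduled
                # (see the "was re-scheduled" record earlier in this log).

    def failing_update():
        raise RuntimeError("binding failed")

    try:
        update_port_or_cleanup(failing_update, cleanup=lambda: print("cleaned up"))
    except RuntimeError as exc:
        print(exc)  # the original exception is preserved and re-raised
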
[ 542.332249] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] Traceback (most recent call last): [ 542.332249] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 542.332249] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] yield resources [ 542.332249] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 542.332249] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] self.driver.spawn(context, instance, image_meta, [ 542.332249] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 542.332249] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 542.332249] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 542.332249] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] vm_ref = self.build_virtual_machine(instance, [ 542.332614] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 542.332614] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] vif_infos = vmwarevif.get_vif_info(self._session, [ 542.332614] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 542.332614] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] for vif in network_info: [ 542.332614] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 542.332614] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] return self._sync_wrapper(fn, *args, **kwargs) [ 542.332614] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 542.332614] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] self.wait() [ 542.332614] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 542.332614] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] self[:] = self._gt.wait() [ 542.332614] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 542.332614] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] return self._exit_event.wait() [ 542.332614] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 542.332972] env[63345]: ERROR 
nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] result = hub.switch() [ 542.332972] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 542.332972] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] return self.greenlet.switch() [ 542.332972] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 542.332972] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] result = function(*args, **kwargs) [ 542.332972] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 542.332972] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] return func(*args, **kwargs) [ 542.332972] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 542.332972] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] raise e [ 542.332972] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 542.332972] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] nwinfo = self.network_api.allocate_for_instance( [ 542.332972] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 542.332972] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] created_port_ids = self._update_ports_for_instance( [ 542.333419] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 542.333419] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] with excutils.save_and_reraise_exception(): [ 542.333419] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 542.333419] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] self.force_reraise() [ 542.333419] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 542.333419] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] raise self.value [ 542.333419] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 542.333419] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] updated_port = self._update_port( [ 542.333419] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 542.333419] 
env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] _ensure_no_port_binding_failure(port) [ 542.333419] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 542.333419] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] raise exception.PortBindingFailed(port_id=port['id']) [ 542.333771] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] nova.exception.PortBindingFailed: Binding failed for port ca7e4f78-240a-446f-ad69-0e78b3d99ee7, please check neutron logs for more information. [ 542.333771] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] [ 542.333771] env[63345]: INFO nova.compute.manager [None req-a097932b-60f9-4384-b1fe-e2686fbffa04 tempest-ServersAdminNegativeTestJSON-1806261786 tempest-ServersAdminNegativeTestJSON-1806261786-project-member] [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] Terminating instance [ 542.395154] env[63345]: DEBUG nova.network.neutron [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 542.458893] env[63345]: DEBUG nova.network.neutron [None req-092e8cc7-bca1-43ff-be18-4b9d654c5cad tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 542.728976] env[63345]: DEBUG nova.network.neutron [None req-092e8cc7-bca1-43ff-be18-4b9d654c5cad tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 542.732263] env[63345]: DEBUG oslo_vmware.api [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Task: {'id': task-1016637, 'name': Rename_Task, 'duration_secs': 0.142352} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 542.732923] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 542.733297] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-276bba93-96cc-49a2-9c95-7880e315ee80 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.741762] env[63345]: DEBUG oslo_vmware.api [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Waiting for the task: (returnval){ [ 542.741762] env[63345]: value = "task-1016638" [ 542.741762] env[63345]: _type = "Task" [ 542.741762] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 542.751747] env[63345]: DEBUG oslo_vmware.api [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Task: {'id': task-1016638, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 542.835767] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a097932b-60f9-4384-b1fe-e2686fbffa04 tempest-ServersAdminNegativeTestJSON-1806261786 tempest-ServersAdminNegativeTestJSON-1806261786-project-member] Acquiring lock "refresh_cache-cec6ec60-5e8a-4c31-ba75-001f3c1980f0" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 542.835965] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a097932b-60f9-4384-b1fe-e2686fbffa04 tempest-ServersAdminNegativeTestJSON-1806261786 tempest-ServersAdminNegativeTestJSON-1806261786-project-member] Acquired lock "refresh_cache-cec6ec60-5e8a-4c31-ba75-001f3c1980f0" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 542.836256] env[63345]: DEBUG nova.network.neutron [None req-a097932b-60f9-4384-b1fe-e2686fbffa04 tempest-ServersAdminNegativeTestJSON-1806261786 tempest-ServersAdminNegativeTestJSON-1806261786-project-member] [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 542.899821] env[63345]: DEBUG oslo_concurrency.lockutils [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] Releasing lock "refresh_cache-1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 542.900010] env[63345]: DEBUG nova.compute.manager [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] Start destroying the instance on the hypervisor. 
{{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 542.900216] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 542.900827] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5691c153-2f7f-42bb-8c1c-cf3b201ce14a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.911987] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c72b8716-3c86-48d0-b962-c777a1d94e42 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.939723] env[63345]: WARNING nova.virt.vmwareapi.vmops [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68 could not be found. [ 542.939872] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 542.941585] env[63345]: INFO nova.compute.manager [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] Took 0.04 seconds to destroy the instance on the hypervisor. [ 542.941585] env[63345]: DEBUG oslo.service.loopingcall [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 542.945027] env[63345]: DEBUG nova.compute.manager [-] [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 542.947386] env[63345]: DEBUG nova.network.neutron [-] [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 543.004295] env[63345]: DEBUG nova.network.neutron [-] [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 543.236056] env[63345]: INFO nova.scheduler.client.report [None req-04fcf833-0c89-4319-90a1-d11e5e77b068 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] Deleted allocations for instance aa21e116-3bf1-4574-8d4f-d0a1af692e8b [ 543.246028] env[63345]: DEBUG oslo_concurrency.lockutils [None req-092e8cc7-bca1-43ff-be18-4b9d654c5cad tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Releasing lock "refresh_cache-f4e897ce-2df5-40ae-99a8-11cac4902588" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 543.246028] env[63345]: DEBUG nova.compute.manager [None req-092e8cc7-bca1-43ff-be18-4b9d654c5cad tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=63345) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 543.246028] env[63345]: DEBUG nova.compute.manager [None req-092e8cc7-bca1-43ff-be18-4b9d654c5cad tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 543.246028] env[63345]: DEBUG nova.network.neutron [None req-092e8cc7-bca1-43ff-be18-4b9d654c5cad tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 543.263848] env[63345]: DEBUG oslo_vmware.api [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Task: {'id': task-1016638, 'name': PowerOnVM_Task, 'duration_secs': 0.444557} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 543.264530] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 543.265182] env[63345]: DEBUG nova.compute.manager [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 543.266163] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b364b2cd-5d00-4f79-a08e-716447461694 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.279902] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4667494-c8b2-4349-b13a-1c6285942896 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.289960] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42077568-0d48-4e56-8b2c-575740acdd55 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.329875] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1deaaf3e-6a7a-4bb7-9728-1fc9dbe86e75 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.341891] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a871961c-52c0-4612-b93d-408cdfd75eed {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.359537] env[63345]: DEBUG nova.compute.provider_tree [None req-fed8ccca-f158-4dc4-bc3d-351314b383ad tempest-VolumesAssistedSnapshotsTest-639887845 tempest-VolumesAssistedSnapshotsTest-639887845-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 543.375989] env[63345]: DEBUG nova.network.neutron [None req-a097932b-60f9-4384-b1fe-e2686fbffa04 tempest-ServersAdminNegativeTestJSON-1806261786 tempest-ServersAdminNegativeTestJSON-1806261786-project-member] [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 543.434526] env[63345]: DEBUG nova.network.neutron [None req-092e8cc7-bca1-43ff-be18-4b9d654c5cad tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 543.508022] env[63345]: DEBUG nova.network.neutron [-] [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 543.562598] env[63345]: DEBUG nova.compute.manager [req-bf732374-895c-476c-aedd-afd0dd50a7e6 req-157c3b67-93aa-4566-8327-e4ea743dc2bc service nova] [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] Received event network-changed-fc3acfbf-5f06-437b-a030-e8eec8a877b8 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 543.562805] env[63345]: DEBUG nova.compute.manager [req-bf732374-895c-476c-aedd-afd0dd50a7e6 req-157c3b67-93aa-4566-8327-e4ea743dc2bc service nova] [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] Refreshing instance network info cache due to event network-changed-fc3acfbf-5f06-437b-a030-e8eec8a877b8. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 543.564601] env[63345]: DEBUG oslo_concurrency.lockutils [req-bf732374-895c-476c-aedd-afd0dd50a7e6 req-157c3b67-93aa-4566-8327-e4ea743dc2bc service nova] Acquiring lock "refresh_cache-1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 543.564601] env[63345]: DEBUG oslo_concurrency.lockutils [req-bf732374-895c-476c-aedd-afd0dd50a7e6 req-157c3b67-93aa-4566-8327-e4ea743dc2bc service nova] Acquired lock "refresh_cache-1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 543.564601] env[63345]: DEBUG nova.network.neutron [req-bf732374-895c-476c-aedd-afd0dd50a7e6 req-157c3b67-93aa-4566-8327-e4ea743dc2bc service nova] [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] Refreshing network info cache for port fc3acfbf-5f06-437b-a030-e8eec8a877b8 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 543.702861] env[63345]: DEBUG nova.network.neutron [None req-a097932b-60f9-4384-b1fe-e2686fbffa04 tempest-ServersAdminNegativeTestJSON-1806261786 tempest-ServersAdminNegativeTestJSON-1806261786-project-member] [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 543.748487] env[63345]: DEBUG oslo_concurrency.lockutils [None req-85aab4a0-1d2c-4cb7-b057-977c0a911cc2 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquiring lock "079cd9f1-4753-4298-9b06-c3b9925d2982" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 543.748487] env[63345]: DEBUG oslo_concurrency.lockutils [None req-85aab4a0-1d2c-4cb7-b057-977c0a911cc2 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Lock "079cd9f1-4753-4298-9b06-c3b9925d2982" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 543.753533] env[63345]: DEBUG oslo_concurrency.lockutils [None 
req-04fcf833-0c89-4319-90a1-d11e5e77b068 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] Lock "aa21e116-3bf1-4574-8d4f-d0a1af692e8b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.569s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 543.755476] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Lock "aa21e116-3bf1-4574-8d4f-d0a1af692e8b" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 26.678s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 543.755476] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9944b8c5-3691-4a27-a663-173b73af2ecb {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.765654] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1696b7dc-9dbe-4717-869d-a13378e40ff5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.796632] env[63345]: DEBUG oslo_concurrency.lockutils [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 543.862534] env[63345]: DEBUG nova.scheduler.client.report [None req-fed8ccca-f158-4dc4-bc3d-351314b383ad tempest-VolumesAssistedSnapshotsTest-639887845 tempest-VolumesAssistedSnapshotsTest-639887845-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 543.937321] env[63345]: DEBUG nova.network.neutron [None req-092e8cc7-bca1-43ff-be18-4b9d654c5cad tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 544.010409] env[63345]: INFO nova.compute.manager [-] [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] Took 1.07 seconds to deallocate network for instance. 
[ 544.013285] env[63345]: DEBUG nova.compute.claims [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] Aborting claim: {{(pid=63345) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 544.013472] env[63345]: DEBUG oslo_concurrency.lockutils [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 544.108171] env[63345]: DEBUG nova.network.neutron [req-bf732374-895c-476c-aedd-afd0dd50a7e6 req-157c3b67-93aa-4566-8327-e4ea743dc2bc service nova] [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 544.207167] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a097932b-60f9-4384-b1fe-e2686fbffa04 tempest-ServersAdminNegativeTestJSON-1806261786 tempest-ServersAdminNegativeTestJSON-1806261786-project-member] Releasing lock "refresh_cache-cec6ec60-5e8a-4c31-ba75-001f3c1980f0" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 544.208165] env[63345]: DEBUG nova.compute.manager [None req-a097932b-60f9-4384-b1fe-e2686fbffa04 tempest-ServersAdminNegativeTestJSON-1806261786 tempest-ServersAdminNegativeTestJSON-1806261786-project-member] [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] Start destroying the instance on the hypervisor. {{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 544.208165] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a097932b-60f9-4384-b1fe-e2686fbffa04 tempest-ServersAdminNegativeTestJSON-1806261786 tempest-ServersAdminNegativeTestJSON-1806261786-project-member] [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 544.208933] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-00a05942-739f-45d1-a51c-05f1d6dbd4bc {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.219121] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a25be499-346c-447c-ae88-d4aaf34141a3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.247356] env[63345]: WARNING nova.virt.vmwareapi.vmops [None req-a097932b-60f9-4384-b1fe-e2686fbffa04 tempest-ServersAdminNegativeTestJSON-1806261786 tempest-ServersAdminNegativeTestJSON-1806261786-project-member] [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance cec6ec60-5e8a-4c31-ba75-001f3c1980f0 could not be found. 
[ 544.248979] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a097932b-60f9-4384-b1fe-e2686fbffa04 tempest-ServersAdminNegativeTestJSON-1806261786 tempest-ServersAdminNegativeTestJSON-1806261786-project-member] [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 544.248979] env[63345]: INFO nova.compute.manager [None req-a097932b-60f9-4384-b1fe-e2686fbffa04 tempest-ServersAdminNegativeTestJSON-1806261786 tempest-ServersAdminNegativeTestJSON-1806261786-project-member] [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] Took 0.04 seconds to destroy the instance on the hypervisor. [ 544.248979] env[63345]: DEBUG oslo.service.loopingcall [None req-a097932b-60f9-4384-b1fe-e2686fbffa04 tempest-ServersAdminNegativeTestJSON-1806261786 tempest-ServersAdminNegativeTestJSON-1806261786-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 544.248979] env[63345]: DEBUG nova.compute.manager [-] [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 544.248979] env[63345]: DEBUG nova.network.neutron [-] [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 544.256691] env[63345]: DEBUG nova.compute.manager [None req-55f4b490-cb76-4231-a948-849d24288fed tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 544.296745] env[63345]: INFO nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: aa21e116-3bf1-4574-8d4f-d0a1af692e8b] During the sync_power process the instance has moved from host None to host cpu-1 [ 544.297038] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Lock "aa21e116-3bf1-4574-8d4f-d0a1af692e8b" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.542s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 544.312919] env[63345]: DEBUG nova.network.neutron [-] [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 544.367978] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fed8ccca-f158-4dc4-bc3d-351314b383ad tempest-VolumesAssistedSnapshotsTest-639887845 tempest-VolumesAssistedSnapshotsTest-639887845-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.498s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 544.369210] env[63345]: DEBUG nova.compute.manager [None req-fed8ccca-f158-4dc4-bc3d-351314b383ad tempest-VolumesAssistedSnapshotsTest-639887845 tempest-VolumesAssistedSnapshotsTest-639887845-project-member] [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] Start building networks asynchronously for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 544.371797] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5ea571cd-8f6b-4257-8cc6-0ee9e2154c2b tempest-ServersTestFqdnHostnames-427992595 tempest-ServersTestFqdnHostnames-427992595-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.758s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 544.439619] env[63345]: DEBUG nova.network.neutron [req-bf732374-895c-476c-aedd-afd0dd50a7e6 req-157c3b67-93aa-4566-8327-e4ea743dc2bc service nova] [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 544.441202] env[63345]: INFO nova.compute.manager [None req-092e8cc7-bca1-43ff-be18-4b9d654c5cad tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: f4e897ce-2df5-40ae-99a8-11cac4902588] Took 1.20 seconds to deallocate network for instance. [ 544.788304] env[63345]: DEBUG oslo_concurrency.lockutils [None req-55f4b490-cb76-4231-a948-849d24288fed tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 544.816414] env[63345]: DEBUG nova.network.neutron [-] [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 544.879200] env[63345]: DEBUG nova.compute.utils [None req-fed8ccca-f158-4dc4-bc3d-351314b383ad tempest-VolumesAssistedSnapshotsTest-639887845 tempest-VolumesAssistedSnapshotsTest-639887845-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 544.883210] env[63345]: DEBUG nova.compute.manager [None req-fed8ccca-f158-4dc4-bc3d-351314b383ad tempest-VolumesAssistedSnapshotsTest-639887845 tempest-VolumesAssistedSnapshotsTest-639887845-project-member] [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] Allocating IP information in the background. 
{{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 544.883529] env[63345]: DEBUG nova.network.neutron [None req-fed8ccca-f158-4dc4-bc3d-351314b383ad tempest-VolumesAssistedSnapshotsTest-639887845 tempest-VolumesAssistedSnapshotsTest-639887845-project-member] [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 544.949147] env[63345]: DEBUG oslo_concurrency.lockutils [req-bf732374-895c-476c-aedd-afd0dd50a7e6 req-157c3b67-93aa-4566-8327-e4ea743dc2bc service nova] Releasing lock "refresh_cache-1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 544.982156] env[63345]: DEBUG nova.policy [None req-fed8ccca-f158-4dc4-bc3d-351314b383ad tempest-VolumesAssistedSnapshotsTest-639887845 tempest-VolumesAssistedSnapshotsTest-639887845-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f89b41ff8b8046b88b55576064501ae0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '46e83e720d554caca3dd9c8cb004438e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 545.131105] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f951fde0-e7d4-4228-ab48-05cbc0ea0cd4 tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Acquiring lock "2889c4d1-ac1b-404d-a4f7-2b908557348d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 545.131502] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f951fde0-e7d4-4228-ab48-05cbc0ea0cd4 tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Lock "2889c4d1-ac1b-404d-a4f7-2b908557348d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 545.183899] env[63345]: DEBUG nova.compute.manager [req-23e7cd42-a370-425f-a2b0-1a71414ba247 req-444ca8f0-14de-4736-9537-f5bd8857b9ab service nova] [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] Received event network-changed-ca7e4f78-240a-446f-ad69-0e78b3d99ee7 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 545.184168] env[63345]: DEBUG nova.compute.manager [req-23e7cd42-a370-425f-a2b0-1a71414ba247 req-444ca8f0-14de-4736-9537-f5bd8857b9ab service nova] [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] Refreshing instance network info cache due to event network-changed-ca7e4f78-240a-446f-ad69-0e78b3d99ee7. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 545.184441] env[63345]: DEBUG oslo_concurrency.lockutils [req-23e7cd42-a370-425f-a2b0-1a71414ba247 req-444ca8f0-14de-4736-9537-f5bd8857b9ab service nova] Acquiring lock "refresh_cache-cec6ec60-5e8a-4c31-ba75-001f3c1980f0" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 545.184618] env[63345]: DEBUG oslo_concurrency.lockutils [req-23e7cd42-a370-425f-a2b0-1a71414ba247 req-444ca8f0-14de-4736-9537-f5bd8857b9ab service nova] Acquired lock "refresh_cache-cec6ec60-5e8a-4c31-ba75-001f3c1980f0" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 545.184807] env[63345]: DEBUG nova.network.neutron [req-23e7cd42-a370-425f-a2b0-1a71414ba247 req-444ca8f0-14de-4736-9537-f5bd8857b9ab service nova] [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] Refreshing network info cache for port ca7e4f78-240a-446f-ad69-0e78b3d99ee7 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 545.242501] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecd087dd-5552-480b-8c6f-2e846bfd0fbe {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.251296] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84d38349-13bf-44c7-ac24-1255f122e558 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.291979] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1823d1c4-5834-49c0-8cf1-7f62070c804a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.303182] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c638eb6d-e6a8-42cf-a486-b6367d98c58c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.321046] env[63345]: INFO nova.compute.manager [-] [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] Took 1.07 seconds to deallocate network for instance. 
[ 545.321046] env[63345]: DEBUG nova.compute.provider_tree [None req-5ea571cd-8f6b-4257-8cc6-0ee9e2154c2b tempest-ServersTestFqdnHostnames-427992595 tempest-ServersTestFqdnHostnames-427992595-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 545.323913] env[63345]: DEBUG nova.compute.claims [None req-a097932b-60f9-4384-b1fe-e2686fbffa04 tempest-ServersAdminNegativeTestJSON-1806261786 tempest-ServersAdminNegativeTestJSON-1806261786-project-member] [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] Aborting claim: {{(pid=63345) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 545.324013] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a097932b-60f9-4384-b1fe-e2686fbffa04 tempest-ServersAdminNegativeTestJSON-1806261786 tempest-ServersAdminNegativeTestJSON-1806261786-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 545.384728] env[63345]: DEBUG nova.compute.manager [None req-fed8ccca-f158-4dc4-bc3d-351314b383ad tempest-VolumesAssistedSnapshotsTest-639887845 tempest-VolumesAssistedSnapshotsTest-639887845-project-member] [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] Start building block device mappings for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 545.494079] env[63345]: INFO nova.scheduler.client.report [None req-092e8cc7-bca1-43ff-be18-4b9d654c5cad tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Deleted allocations for instance f4e897ce-2df5-40ae-99a8-11cac4902588 [ 545.761047] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0ec822aa-b63d-49c1-abe1-705df47e5c43 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Acquiring lock "51d6db80-9d1f-4e38-a564-f587474f6294" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 545.761336] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0ec822aa-b63d-49c1-abe1-705df47e5c43 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Lock "51d6db80-9d1f-4e38-a564-f587474f6294" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 545.761579] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0ec822aa-b63d-49c1-abe1-705df47e5c43 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Acquiring lock "51d6db80-9d1f-4e38-a564-f587474f6294-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 545.761761] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0ec822aa-b63d-49c1-abe1-705df47e5c43 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Lock "51d6db80-9d1f-4e38-a564-f587474f6294-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" 
:: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 545.761932] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0ec822aa-b63d-49c1-abe1-705df47e5c43 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Lock "51d6db80-9d1f-4e38-a564-f587474f6294-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 545.766082] env[63345]: INFO nova.compute.manager [None req-0ec822aa-b63d-49c1-abe1-705df47e5c43 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] [instance: 51d6db80-9d1f-4e38-a564-f587474f6294] Terminating instance [ 545.796828] env[63345]: DEBUG nova.network.neutron [None req-fed8ccca-f158-4dc4-bc3d-351314b383ad tempest-VolumesAssistedSnapshotsTest-639887845 tempest-VolumesAssistedSnapshotsTest-639887845-project-member] [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] Successfully created port: 3fbb95f1-c342-46f0-9f12-4c6c1d67942d {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 545.802361] env[63345]: DEBUG nova.network.neutron [req-23e7cd42-a370-425f-a2b0-1a71414ba247 req-444ca8f0-14de-4736-9537-f5bd8857b9ab service nova] [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 545.825265] env[63345]: DEBUG nova.scheduler.client.report [None req-5ea571cd-8f6b-4257-8cc6-0ee9e2154c2b tempest-ServersTestFqdnHostnames-427992595 tempest-ServersTestFqdnHostnames-427992595-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 546.007446] env[63345]: DEBUG oslo_concurrency.lockutils [None req-092e8cc7-bca1-43ff-be18-4b9d654c5cad tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Lock "f4e897ce-2df5-40ae-99a8-11cac4902588" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.871s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 546.196698] env[63345]: DEBUG nova.network.neutron [req-23e7cd42-a370-425f-a2b0-1a71414ba247 req-444ca8f0-14de-4736-9537-f5bd8857b9ab service nova] [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 546.275192] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0ec822aa-b63d-49c1-abe1-705df47e5c43 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Acquiring lock "refresh_cache-51d6db80-9d1f-4e38-a564-f587474f6294" {{(pid=63345) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 546.275192] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0ec822aa-b63d-49c1-abe1-705df47e5c43 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Acquired lock "refresh_cache-51d6db80-9d1f-4e38-a564-f587474f6294" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 546.275192] env[63345]: DEBUG nova.network.neutron [None req-0ec822aa-b63d-49c1-abe1-705df47e5c43 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] [instance: 51d6db80-9d1f-4e38-a564-f587474f6294] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 546.330581] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5ea571cd-8f6b-4257-8cc6-0ee9e2154c2b tempest-ServersTestFqdnHostnames-427992595 tempest-ServersTestFqdnHostnames-427992595-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.959s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 546.331831] env[63345]: ERROR nova.compute.manager [None req-5ea571cd-8f6b-4257-8cc6-0ee9e2154c2b tempest-ServersTestFqdnHostnames-427992595 tempest-ServersTestFqdnHostnames-427992595-project-member] [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port e75e1c47-4c22-4d2c-8365-bf365e8ee881, please check neutron logs for more information. [ 546.331831] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] Traceback (most recent call last): [ 546.331831] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 546.331831] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] self.driver.spawn(context, instance, image_meta, [ 546.331831] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 546.331831] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] self._vmops.spawn(context, instance, image_meta, injected_files, [ 546.331831] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 546.331831] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] vm_ref = self.build_virtual_machine(instance, [ 546.331831] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 546.331831] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] vif_infos = vmwarevif.get_vif_info(self._session, [ 546.331831] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 546.332532] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] for vif in network_info: [ 546.332532] env[63345]: ERROR 
nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 546.332532] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] return self._sync_wrapper(fn, *args, **kwargs) [ 546.332532] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 546.332532] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] self.wait() [ 546.332532] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 546.332532] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] self[:] = self._gt.wait() [ 546.332532] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 546.332532] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] return self._exit_event.wait() [ 546.332532] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 546.332532] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] result = hub.switch() [ 546.332532] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 546.332532] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] return self.greenlet.switch() [ 546.333962] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 546.333962] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] result = function(*args, **kwargs) [ 546.333962] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 546.333962] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] return func(*args, **kwargs) [ 546.333962] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 546.333962] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] raise e [ 546.333962] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 546.333962] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] nwinfo = self.network_api.allocate_for_instance( [ 546.333962] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 546.333962] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] created_port_ids = self._update_ports_for_instance( [ 546.333962] env[63345]: ERROR nova.compute.manager [instance: 
c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 546.333962] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] with excutils.save_and_reraise_exception(): [ 546.333962] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 546.334673] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] self.force_reraise() [ 546.334673] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 546.334673] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] raise self.value [ 546.334673] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 546.334673] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] updated_port = self._update_port( [ 546.334673] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 546.334673] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] _ensure_no_port_binding_failure(port) [ 546.334673] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 546.334673] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] raise exception.PortBindingFailed(port_id=port['id']) [ 546.334673] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] nova.exception.PortBindingFailed: Binding failed for port e75e1c47-4c22-4d2c-8365-bf365e8ee881, please check neutron logs for more information. [ 546.334673] env[63345]: ERROR nova.compute.manager [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] [ 546.336132] env[63345]: DEBUG nova.compute.utils [None req-5ea571cd-8f6b-4257-8cc6-0ee9e2154c2b tempest-ServersTestFqdnHostnames-427992595 tempest-ServersTestFqdnHostnames-427992595-project-member] [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] Binding failed for port e75e1c47-4c22-4d2c-8365-bf365e8ee881, please check neutron logs for more information. 
{{(pid=63345) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 546.336132] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d45e143f-5f98-4d14-bf6e-59c328ff4693 tempest-ServersWithSpecificFlavorTestJSON-889674138 tempest-ServersWithSpecificFlavorTestJSON-889674138-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.701s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 546.337424] env[63345]: INFO nova.compute.claims [None req-d45e143f-5f98-4d14-bf6e-59c328ff4693 tempest-ServersWithSpecificFlavorTestJSON-889674138 tempest-ServersWithSpecificFlavorTestJSON-889674138-project-member] [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 546.341955] env[63345]: DEBUG nova.compute.manager [None req-5ea571cd-8f6b-4257-8cc6-0ee9e2154c2b tempest-ServersTestFqdnHostnames-427992595 tempest-ServersTestFqdnHostnames-427992595-project-member] [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] Build of instance c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65 was re-scheduled: Binding failed for port e75e1c47-4c22-4d2c-8365-bf365e8ee881, please check neutron logs for more information. {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 546.342080] env[63345]: DEBUG nova.compute.manager [None req-5ea571cd-8f6b-4257-8cc6-0ee9e2154c2b tempest-ServersTestFqdnHostnames-427992595 tempest-ServersTestFqdnHostnames-427992595-project-member] [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] Unplugging VIFs for instance {{(pid=63345) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 546.342325] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5ea571cd-8f6b-4257-8cc6-0ee9e2154c2b tempest-ServersTestFqdnHostnames-427992595 tempest-ServersTestFqdnHostnames-427992595-project-member] Acquiring lock "refresh_cache-c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 546.342440] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5ea571cd-8f6b-4257-8cc6-0ee9e2154c2b tempest-ServersTestFqdnHostnames-427992595 tempest-ServersTestFqdnHostnames-427992595-project-member] Acquired lock "refresh_cache-c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 546.342576] env[63345]: DEBUG nova.network.neutron [None req-5ea571cd-8f6b-4257-8cc6-0ee9e2154c2b tempest-ServersTestFqdnHostnames-427992595 tempest-ServersTestFqdnHostnames-427992595-project-member] [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 546.397937] env[63345]: DEBUG nova.compute.manager [None req-fed8ccca-f158-4dc4-bc3d-351314b383ad tempest-VolumesAssistedSnapshotsTest-639887845 tempest-VolumesAssistedSnapshotsTest-639887845-project-member] [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] Start spawning the instance on the hypervisor. 
{{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 546.440935] env[63345]: DEBUG nova.virt.hardware [None req-fed8ccca-f158-4dc4-bc3d-351314b383ad tempest-VolumesAssistedSnapshotsTest-639887845 tempest-VolumesAssistedSnapshotsTest-639887845-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 546.441126] env[63345]: DEBUG nova.virt.hardware [None req-fed8ccca-f158-4dc4-bc3d-351314b383ad tempest-VolumesAssistedSnapshotsTest-639887845 tempest-VolumesAssistedSnapshotsTest-639887845-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 546.441159] env[63345]: DEBUG nova.virt.hardware [None req-fed8ccca-f158-4dc4-bc3d-351314b383ad tempest-VolumesAssistedSnapshotsTest-639887845 tempest-VolumesAssistedSnapshotsTest-639887845-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 546.441328] env[63345]: DEBUG nova.virt.hardware [None req-fed8ccca-f158-4dc4-bc3d-351314b383ad tempest-VolumesAssistedSnapshotsTest-639887845 tempest-VolumesAssistedSnapshotsTest-639887845-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 546.441479] env[63345]: DEBUG nova.virt.hardware [None req-fed8ccca-f158-4dc4-bc3d-351314b383ad tempest-VolumesAssistedSnapshotsTest-639887845 tempest-VolumesAssistedSnapshotsTest-639887845-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 546.441763] env[63345]: DEBUG nova.virt.hardware [None req-fed8ccca-f158-4dc4-bc3d-351314b383ad tempest-VolumesAssistedSnapshotsTest-639887845 tempest-VolumesAssistedSnapshotsTest-639887845-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 546.441843] env[63345]: DEBUG nova.virt.hardware [None req-fed8ccca-f158-4dc4-bc3d-351314b383ad tempest-VolumesAssistedSnapshotsTest-639887845 tempest-VolumesAssistedSnapshotsTest-639887845-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 546.441973] env[63345]: DEBUG nova.virt.hardware [None req-fed8ccca-f158-4dc4-bc3d-351314b383ad tempest-VolumesAssistedSnapshotsTest-639887845 tempest-VolumesAssistedSnapshotsTest-639887845-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 546.444044] env[63345]: DEBUG nova.virt.hardware [None req-fed8ccca-f158-4dc4-bc3d-351314b383ad tempest-VolumesAssistedSnapshotsTest-639887845 tempest-VolumesAssistedSnapshotsTest-639887845-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 546.444254] env[63345]: DEBUG nova.virt.hardware [None req-fed8ccca-f158-4dc4-bc3d-351314b383ad tempest-VolumesAssistedSnapshotsTest-639887845 tempest-VolumesAssistedSnapshotsTest-639887845-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 546.444564] env[63345]: DEBUG nova.virt.hardware [None req-fed8ccca-f158-4dc4-bc3d-351314b383ad tempest-VolumesAssistedSnapshotsTest-639887845 tempest-VolumesAssistedSnapshotsTest-639887845-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 546.447237] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3017d90d-83b1-48c4-82ac-e45cf13cb784 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.456624] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14e7fa36-208f-4f3a-9675-c9ef5bb0ebfe {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.514914] env[63345]: DEBUG nova.compute.manager [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] [instance: 1f595aef-799f-4ca4-be91-e95ef056926c] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 546.537997] env[63345]: ERROR nova.compute.manager [None req-8eb6a8e8-18a6-4bca-bb00-3bbb78f015ed tempest-FloatingIPsAssociationTestJSON-967064990 tempest-FloatingIPsAssociationTestJSON-967064990-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port b0c5ad0c-5682-4e54-b4c7-916cd8074721, please check neutron logs for more information. 
[ 546.537997] env[63345]: ERROR nova.compute.manager Traceback (most recent call last): [ 546.537997] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 546.537997] env[63345]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 546.537997] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 546.537997] env[63345]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 546.537997] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 546.537997] env[63345]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 546.537997] env[63345]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 546.537997] env[63345]: ERROR nova.compute.manager self.force_reraise() [ 546.537997] env[63345]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 546.537997] env[63345]: ERROR nova.compute.manager raise self.value [ 546.537997] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 546.537997] env[63345]: ERROR nova.compute.manager updated_port = self._update_port( [ 546.537997] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 546.537997] env[63345]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 546.538458] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 546.538458] env[63345]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 546.538458] env[63345]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port b0c5ad0c-5682-4e54-b4c7-916cd8074721, please check neutron logs for more information. 
[ 546.538458] env[63345]: ERROR nova.compute.manager [ 546.538458] env[63345]: Traceback (most recent call last): [ 546.538458] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 546.538458] env[63345]: listener.cb(fileno) [ 546.538458] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 546.538458] env[63345]: result = function(*args, **kwargs) [ 546.538458] env[63345]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 546.538458] env[63345]: return func(*args, **kwargs) [ 546.538458] env[63345]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 546.538458] env[63345]: raise e [ 546.538458] env[63345]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 546.538458] env[63345]: nwinfo = self.network_api.allocate_for_instance( [ 546.538458] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 546.538458] env[63345]: created_port_ids = self._update_ports_for_instance( [ 546.538458] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 546.538458] env[63345]: with excutils.save_and_reraise_exception(): [ 546.538458] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 546.538458] env[63345]: self.force_reraise() [ 546.538458] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 546.538458] env[63345]: raise self.value [ 546.538458] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 546.538458] env[63345]: updated_port = self._update_port( [ 546.538458] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 546.538458] env[63345]: _ensure_no_port_binding_failure(port) [ 546.538458] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 546.538458] env[63345]: raise exception.PortBindingFailed(port_id=port['id']) [ 546.540149] env[63345]: nova.exception.PortBindingFailed: Binding failed for port b0c5ad0c-5682-4e54-b4c7-916cd8074721, please check neutron logs for more information. [ 546.540149] env[63345]: Removing descriptor: 19 [ 546.540149] env[63345]: ERROR nova.compute.manager [None req-8eb6a8e8-18a6-4bca-bb00-3bbb78f015ed tempest-FloatingIPsAssociationTestJSON-967064990 tempest-FloatingIPsAssociationTestJSON-967064990-project-member] [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port b0c5ad0c-5682-4e54-b4c7-916cd8074721, please check neutron logs for more information. 
[ 546.540149] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] Traceback (most recent call last): [ 546.540149] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 546.540149] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] yield resources [ 546.540149] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 546.540149] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] self.driver.spawn(context, instance, image_meta, [ 546.540149] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 546.540149] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] self._vmops.spawn(context, instance, image_meta, injected_files, [ 546.540149] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 546.540149] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] vm_ref = self.build_virtual_machine(instance, [ 546.541859] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 546.541859] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] vif_infos = vmwarevif.get_vif_info(self._session, [ 546.541859] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 546.541859] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] for vif in network_info: [ 546.541859] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 546.541859] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] return self._sync_wrapper(fn, *args, **kwargs) [ 546.541859] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 546.541859] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] self.wait() [ 546.541859] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 546.541859] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] self[:] = self._gt.wait() [ 546.541859] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 546.541859] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] return self._exit_event.wait() [ 546.541859] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 546.542369] env[63345]: ERROR 
nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] result = hub.switch() [ 546.542369] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 546.542369] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] return self.greenlet.switch() [ 546.542369] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 546.542369] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] result = function(*args, **kwargs) [ 546.542369] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 546.542369] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] return func(*args, **kwargs) [ 546.542369] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 546.542369] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] raise e [ 546.542369] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 546.542369] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] nwinfo = self.network_api.allocate_for_instance( [ 546.542369] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 546.542369] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] created_port_ids = self._update_ports_for_instance( [ 546.543443] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 546.543443] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] with excutils.save_and_reraise_exception(): [ 546.543443] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 546.543443] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] self.force_reraise() [ 546.543443] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 546.543443] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] raise self.value [ 546.543443] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 546.543443] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] updated_port = self._update_port( [ 546.543443] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 546.543443] 
env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] _ensure_no_port_binding_failure(port) [ 546.543443] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 546.543443] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] raise exception.PortBindingFailed(port_id=port['id']) [ 546.543945] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] nova.exception.PortBindingFailed: Binding failed for port b0c5ad0c-5682-4e54-b4c7-916cd8074721, please check neutron logs for more information. [ 546.543945] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] [ 546.543945] env[63345]: INFO nova.compute.manager [None req-8eb6a8e8-18a6-4bca-bb00-3bbb78f015ed tempest-FloatingIPsAssociationTestJSON-967064990 tempest-FloatingIPsAssociationTestJSON-967064990-project-member] [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] Terminating instance [ 546.698531] env[63345]: DEBUG oslo_concurrency.lockutils [req-23e7cd42-a370-425f-a2b0-1a71414ba247 req-444ca8f0-14de-4736-9537-f5bd8857b9ab service nova] Releasing lock "refresh_cache-cec6ec60-5e8a-4c31-ba75-001f3c1980f0" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 546.817915] env[63345]: DEBUG nova.network.neutron [None req-0ec822aa-b63d-49c1-abe1-705df47e5c43 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] [instance: 51d6db80-9d1f-4e38-a564-f587474f6294] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 546.905347] env[63345]: DEBUG nova.network.neutron [None req-5ea571cd-8f6b-4257-8cc6-0ee9e2154c2b tempest-ServersTestFqdnHostnames-427992595 tempest-ServersTestFqdnHostnames-427992595-project-member] [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 547.044142] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8eb6a8e8-18a6-4bca-bb00-3bbb78f015ed tempest-FloatingIPsAssociationTestJSON-967064990 tempest-FloatingIPsAssociationTestJSON-967064990-project-member] Acquiring lock "refresh_cache-64c4c933-2b89-409a-9b4c-eccc7f481b67" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 547.044142] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8eb6a8e8-18a6-4bca-bb00-3bbb78f015ed tempest-FloatingIPsAssociationTestJSON-967064990 tempest-FloatingIPsAssociationTestJSON-967064990-project-member] Acquired lock "refresh_cache-64c4c933-2b89-409a-9b4c-eccc7f481b67" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 547.044142] env[63345]: DEBUG nova.network.neutron [None req-8eb6a8e8-18a6-4bca-bb00-3bbb78f015ed tempest-FloatingIPsAssociationTestJSON-967064990 tempest-FloatingIPsAssociationTestJSON-967064990-project-member] [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 547.048252] env[63345]: DEBUG oslo_concurrency.lockutils [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 547.054444] env[63345]: DEBUG nova.network.neutron [None req-0ec822aa-b63d-49c1-abe1-705df47e5c43 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] [instance: 51d6db80-9d1f-4e38-a564-f587474f6294] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 547.242462] env[63345]: DEBUG nova.network.neutron [None req-5ea571cd-8f6b-4257-8cc6-0ee9e2154c2b tempest-ServersTestFqdnHostnames-427992595 tempest-ServersTestFqdnHostnames-427992595-project-member] [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 547.560302] env[63345]: DEBUG nova.compute.manager [req-9f435563-29e6-4957-9f84-6c6098dac529 req-cffcb8ad-8bbf-4da6-9cf9-b56357cd67fb service nova] [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] Received event network-vif-deleted-fc3acfbf-5f06-437b-a030-e8eec8a877b8 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 547.562661] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0ec822aa-b63d-49c1-abe1-705df47e5c43 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Releasing lock "refresh_cache-51d6db80-9d1f-4e38-a564-f587474f6294" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 547.562661] env[63345]: DEBUG nova.compute.manager [None req-0ec822aa-b63d-49c1-abe1-705df47e5c43 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] [instance: 51d6db80-9d1f-4e38-a564-f587474f6294] Start destroying the instance on the hypervisor. 
{{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 547.562661] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-0ec822aa-b63d-49c1-abe1-705df47e5c43 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] [instance: 51d6db80-9d1f-4e38-a564-f587474f6294] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 547.563660] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a3c7db7-5134-4b37-89c1-be27730af77b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.572530] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ec822aa-b63d-49c1-abe1-705df47e5c43 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] [instance: 51d6db80-9d1f-4e38-a564-f587474f6294] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 547.575143] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-88adfaae-8914-4996-85d3-95c0abb0956a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.583070] env[63345]: DEBUG oslo_vmware.api [None req-0ec822aa-b63d-49c1-abe1-705df47e5c43 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Waiting for the task: (returnval){ [ 547.583070] env[63345]: value = "task-1016639" [ 547.583070] env[63345]: _type = "Task" [ 547.583070] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 547.591354] env[63345]: DEBUG oslo_vmware.api [None req-0ec822aa-b63d-49c1-abe1-705df47e5c43 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Task: {'id': task-1016639, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 547.602800] env[63345]: DEBUG nova.network.neutron [None req-8eb6a8e8-18a6-4bca-bb00-3bbb78f015ed tempest-FloatingIPsAssociationTestJSON-967064990 tempest-FloatingIPsAssociationTestJSON-967064990-project-member] [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 547.653350] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d24eb8b7-4168-4976-a729-a3d9645c3fbc {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.661049] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2e4be91-7a13-4770-8c87-bb61367fa92b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.693644] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2731d4e4-6ba8-4863-ba2c-49b4ab09f532 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.703060] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6932edd4-43e7-4e4e-aa5b-d00e15ce46f0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.718936] env[63345]: DEBUG nova.compute.provider_tree [None req-d45e143f-5f98-4d14-bf6e-59c328ff4693 tempest-ServersWithSpecificFlavorTestJSON-889674138 tempest-ServersWithSpecificFlavorTestJSON-889674138-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 547.745278] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5ea571cd-8f6b-4257-8cc6-0ee9e2154c2b tempest-ServersTestFqdnHostnames-427992595 tempest-ServersTestFqdnHostnames-427992595-project-member] Releasing lock "refresh_cache-c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 547.745565] env[63345]: DEBUG nova.compute.manager [None req-5ea571cd-8f6b-4257-8cc6-0ee9e2154c2b tempest-ServersTestFqdnHostnames-427992595 tempest-ServersTestFqdnHostnames-427992595-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=63345) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 547.745565] env[63345]: DEBUG nova.compute.manager [None req-5ea571cd-8f6b-4257-8cc6-0ee9e2154c2b tempest-ServersTestFqdnHostnames-427992595 tempest-ServersTestFqdnHostnames-427992595-project-member] [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 547.745704] env[63345]: DEBUG nova.network.neutron [None req-5ea571cd-8f6b-4257-8cc6-0ee9e2154c2b tempest-ServersTestFqdnHostnames-427992595 tempest-ServersTestFqdnHostnames-427992595-project-member] [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 547.807447] env[63345]: DEBUG nova.network.neutron [None req-5ea571cd-8f6b-4257-8cc6-0ee9e2154c2b tempest-ServersTestFqdnHostnames-427992595 tempest-ServersTestFqdnHostnames-427992595-project-member] [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 547.874926] env[63345]: DEBUG nova.network.neutron [None req-8eb6a8e8-18a6-4bca-bb00-3bbb78f015ed tempest-FloatingIPsAssociationTestJSON-967064990 tempest-FloatingIPsAssociationTestJSON-967064990-project-member] [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 547.933539] env[63345]: INFO nova.compute.manager [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Rebuilding instance [ 547.994733] env[63345]: DEBUG nova.compute.manager [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 547.994733] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b8664d9-0745-4e54-a289-dcc00e25d9f1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.099530] env[63345]: DEBUG oslo_vmware.api [None req-0ec822aa-b63d-49c1-abe1-705df47e5c43 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Task: {'id': task-1016639, 'name': PowerOffVM_Task, 'duration_secs': 0.118964} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 548.099530] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ec822aa-b63d-49c1-abe1-705df47e5c43 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] [instance: 51d6db80-9d1f-4e38-a564-f587474f6294] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 548.099530] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-0ec822aa-b63d-49c1-abe1-705df47e5c43 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] [instance: 51d6db80-9d1f-4e38-a564-f587474f6294] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 548.099765] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d59adf99-de21-4499-9a44-f7c238282806 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.124120] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-0ec822aa-b63d-49c1-abe1-705df47e5c43 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] [instance: 51d6db80-9d1f-4e38-a564-f587474f6294] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 548.124344] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-0ec822aa-b63d-49c1-abe1-705df47e5c43 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] [instance: 51d6db80-9d1f-4e38-a564-f587474f6294] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 548.124569] env[63345]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-0ec822aa-b63d-49c1-abe1-705df47e5c43 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Deleting the datastore file [datastore2] 51d6db80-9d1f-4e38-a564-f587474f6294 {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 548.125124] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c926c0cf-7f5e-4e57-84bc-3e63b331b74a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.131183] env[63345]: DEBUG oslo_vmware.api [None req-0ec822aa-b63d-49c1-abe1-705df47e5c43 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Waiting for the task: (returnval){ [ 548.131183] env[63345]: value = "task-1016641" [ 548.131183] env[63345]: _type = "Task" [ 548.131183] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 548.139152] env[63345]: DEBUG oslo_vmware.api [None req-0ec822aa-b63d-49c1-abe1-705df47e5c43 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Task: {'id': task-1016641, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 548.223013] env[63345]: DEBUG nova.scheduler.client.report [None req-d45e143f-5f98-4d14-bf6e-59c328ff4693 tempest-ServersWithSpecificFlavorTestJSON-889674138 tempest-ServersWithSpecificFlavorTestJSON-889674138-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 548.315471] env[63345]: DEBUG nova.network.neutron [None req-5ea571cd-8f6b-4257-8cc6-0ee9e2154c2b tempest-ServersTestFqdnHostnames-427992595 tempest-ServersTestFqdnHostnames-427992595-project-member] [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 548.382673] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8eb6a8e8-18a6-4bca-bb00-3bbb78f015ed tempest-FloatingIPsAssociationTestJSON-967064990 tempest-FloatingIPsAssociationTestJSON-967064990-project-member] Releasing lock "refresh_cache-64c4c933-2b89-409a-9b4c-eccc7f481b67" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 548.382824] env[63345]: DEBUG nova.compute.manager [None req-8eb6a8e8-18a6-4bca-bb00-3bbb78f015ed tempest-FloatingIPsAssociationTestJSON-967064990 tempest-FloatingIPsAssociationTestJSON-967064990-project-member] [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] Start destroying the instance on the hypervisor. 
{{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 548.383031] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-8eb6a8e8-18a6-4bca-bb00-3bbb78f015ed tempest-FloatingIPsAssociationTestJSON-967064990 tempest-FloatingIPsAssociationTestJSON-967064990-project-member] [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 548.383346] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fad5eac8-18ec-4a29-8c94-eb067ca30d11 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.395105] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71011f75-32da-4228-9142-e2fde347819f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.417555] env[63345]: WARNING nova.virt.vmwareapi.vmops [None req-8eb6a8e8-18a6-4bca-bb00-3bbb78f015ed tempest-FloatingIPsAssociationTestJSON-967064990 tempest-FloatingIPsAssociationTestJSON-967064990-project-member] [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 64c4c933-2b89-409a-9b4c-eccc7f481b67 could not be found. [ 548.417811] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-8eb6a8e8-18a6-4bca-bb00-3bbb78f015ed tempest-FloatingIPsAssociationTestJSON-967064990 tempest-FloatingIPsAssociationTestJSON-967064990-project-member] [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 548.417994] env[63345]: INFO nova.compute.manager [None req-8eb6a8e8-18a6-4bca-bb00-3bbb78f015ed tempest-FloatingIPsAssociationTestJSON-967064990 tempest-FloatingIPsAssociationTestJSON-967064990-project-member] [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] Took 0.03 seconds to destroy the instance on the hypervisor. [ 548.418254] env[63345]: DEBUG oslo.service.loopingcall [None req-8eb6a8e8-18a6-4bca-bb00-3bbb78f015ed tempest-FloatingIPsAssociationTestJSON-967064990 tempest-FloatingIPsAssociationTestJSON-967064990-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 548.418502] env[63345]: DEBUG nova.compute.manager [-] [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 548.418594] env[63345]: DEBUG nova.network.neutron [-] [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 548.445635] env[63345]: DEBUG nova.network.neutron [-] [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 548.647043] env[63345]: DEBUG oslo_vmware.api [None req-0ec822aa-b63d-49c1-abe1-705df47e5c43 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Task: {'id': task-1016641, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.091587} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 548.647043] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ec822aa-b63d-49c1-abe1-705df47e5c43 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 548.647043] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-0ec822aa-b63d-49c1-abe1-705df47e5c43 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] [instance: 51d6db80-9d1f-4e38-a564-f587474f6294] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 548.647043] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-0ec822aa-b63d-49c1-abe1-705df47e5c43 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] [instance: 51d6db80-9d1f-4e38-a564-f587474f6294] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 548.647043] env[63345]: INFO nova.compute.manager [None req-0ec822aa-b63d-49c1-abe1-705df47e5c43 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] [instance: 51d6db80-9d1f-4e38-a564-f587474f6294] Took 1.08 seconds to destroy the instance on the hypervisor. [ 548.647500] env[63345]: DEBUG oslo.service.loopingcall [None req-0ec822aa-b63d-49c1-abe1-705df47e5c43 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 548.651317] env[63345]: DEBUG nova.compute.manager [-] [instance: 51d6db80-9d1f-4e38-a564-f587474f6294] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 548.651430] env[63345]: DEBUG nova.network.neutron [-] [instance: 51d6db80-9d1f-4e38-a564-f587474f6294] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 548.656508] env[63345]: DEBUG nova.compute.manager [req-95634a1e-cf06-429a-af76-2040096dd22c req-4a41d911-0684-486d-a886-9bb0c0ad684a service nova] [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] Received event network-vif-deleted-ca7e4f78-240a-446f-ad69-0e78b3d99ee7 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 548.657041] env[63345]: DEBUG nova.compute.manager [req-95634a1e-cf06-429a-af76-2040096dd22c req-4a41d911-0684-486d-a886-9bb0c0ad684a service nova] [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] Received event network-changed-b0c5ad0c-5682-4e54-b4c7-916cd8074721 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 548.657041] env[63345]: DEBUG nova.compute.manager [req-95634a1e-cf06-429a-af76-2040096dd22c req-4a41d911-0684-486d-a886-9bb0c0ad684a service nova] [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] Refreshing instance network info cache due to event network-changed-b0c5ad0c-5682-4e54-b4c7-916cd8074721. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 548.657187] env[63345]: DEBUG oslo_concurrency.lockutils [req-95634a1e-cf06-429a-af76-2040096dd22c req-4a41d911-0684-486d-a886-9bb0c0ad684a service nova] Acquiring lock "refresh_cache-64c4c933-2b89-409a-9b4c-eccc7f481b67" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 548.657251] env[63345]: DEBUG oslo_concurrency.lockutils [req-95634a1e-cf06-429a-af76-2040096dd22c req-4a41d911-0684-486d-a886-9bb0c0ad684a service nova] Acquired lock "refresh_cache-64c4c933-2b89-409a-9b4c-eccc7f481b67" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 548.657391] env[63345]: DEBUG nova.network.neutron [req-95634a1e-cf06-429a-af76-2040096dd22c req-4a41d911-0684-486d-a886-9bb0c0ad684a service nova] [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] Refreshing network info cache for port b0c5ad0c-5682-4e54-b4c7-916cd8074721 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 548.691991] env[63345]: DEBUG nova.network.neutron [-] [instance: 51d6db80-9d1f-4e38-a564-f587474f6294] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 548.730045] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d45e143f-5f98-4d14-bf6e-59c328ff4693 tempest-ServersWithSpecificFlavorTestJSON-889674138 tempest-ServersWithSpecificFlavorTestJSON-889674138-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.393s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 548.730045] env[63345]: DEBUG nova.compute.manager [None req-d45e143f-5f98-4d14-bf6e-59c328ff4693 tempest-ServersWithSpecificFlavorTestJSON-889674138 tempest-ServersWithSpecificFlavorTestJSON-889674138-project-member] [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] Start building networks asynchronously for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 548.736322] env[63345]: DEBUG oslo_concurrency.lockutils [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.133s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 548.740478] env[63345]: INFO nova.compute.claims [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 175ede99-48e4-43dc-b563-140f42244c97] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 548.819776] env[63345]: INFO nova.compute.manager [None req-5ea571cd-8f6b-4257-8cc6-0ee9e2154c2b tempest-ServersTestFqdnHostnames-427992595 tempest-ServersTestFqdnHostnames-427992595-project-member] [instance: c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65] Took 1.07 seconds to deallocate network for instance. 
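Every PortBindingFailed traceback in this run ends in nova.network.neutron._ensure_no_port_binding_failure, so the failure is Neutron reporting a failed VIF binding on the updated port rather than Nova mis-calling the API. The snippet below is only a minimal reconstruction of that check for readers following the tracebacks, assuming a Neutron port dict as returned from the port update; the 'binding:vif_type' key and the 'binding_failed' value come from the standard Neutron port API, not from anything specific to this log.

    # Minimal sketch (not the Nova source itself) of the check that raises
    # nova.exception.PortBindingFailed in the tracebacks above.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs "
                "for more information." % port_id)

    def ensure_no_port_binding_failure(port):
        # Neutron sets 'binding:vif_type' to 'binding_failed' when no
        # mechanism driver could bind the port on the requested host.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

For example, ensure_no_port_binding_failure({'id': 'b0c5ad0c-5682-4e54-b4c7-916cd8074721', 'binding:vif_type': 'binding_failed'}) reproduces the exact message seen in the tracebacks above.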
[ 548.950926] env[63345]: DEBUG nova.network.neutron [-] [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 549.011675] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 549.011962] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cb4c7b6c-5e8e-4403-b801-8ab3ccc62fe4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.020978] env[63345]: DEBUG oslo_vmware.api [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] Waiting for the task: (returnval){ [ 549.020978] env[63345]: value = "task-1016642" [ 549.020978] env[63345]: _type = "Task" [ 549.020978] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 549.031051] env[63345]: DEBUG oslo_vmware.api [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] Task: {'id': task-1016642, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 549.089279] env[63345]: ERROR nova.compute.manager [None req-fed8ccca-f158-4dc4-bc3d-351314b383ad tempest-VolumesAssistedSnapshotsTest-639887845 tempest-VolumesAssistedSnapshotsTest-639887845-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 3fbb95f1-c342-46f0-9f12-4c6c1d67942d, please check neutron logs for more information. 
[ 549.089279] env[63345]: ERROR nova.compute.manager Traceback (most recent call last): [ 549.089279] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 549.089279] env[63345]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 549.089279] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 549.089279] env[63345]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 549.089279] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 549.089279] env[63345]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 549.089279] env[63345]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 549.089279] env[63345]: ERROR nova.compute.manager self.force_reraise() [ 549.089279] env[63345]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 549.089279] env[63345]: ERROR nova.compute.manager raise self.value [ 549.089279] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 549.089279] env[63345]: ERROR nova.compute.manager updated_port = self._update_port( [ 549.089279] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 549.089279] env[63345]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 549.090389] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 549.090389] env[63345]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 549.090389] env[63345]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 3fbb95f1-c342-46f0-9f12-4c6c1d67942d, please check neutron logs for more information. 
[ 549.090389] env[63345]: ERROR nova.compute.manager [ 549.090389] env[63345]: Traceback (most recent call last): [ 549.090389] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 549.090389] env[63345]: listener.cb(fileno) [ 549.090389] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 549.090389] env[63345]: result = function(*args, **kwargs) [ 549.090389] env[63345]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 549.090389] env[63345]: return func(*args, **kwargs) [ 549.090389] env[63345]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 549.090389] env[63345]: raise e [ 549.090389] env[63345]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 549.090389] env[63345]: nwinfo = self.network_api.allocate_for_instance( [ 549.090389] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 549.090389] env[63345]: created_port_ids = self._update_ports_for_instance( [ 549.090389] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 549.090389] env[63345]: with excutils.save_and_reraise_exception(): [ 549.090389] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 549.090389] env[63345]: self.force_reraise() [ 549.090389] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 549.090389] env[63345]: raise self.value [ 549.090389] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 549.090389] env[63345]: updated_port = self._update_port( [ 549.090389] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 549.090389] env[63345]: _ensure_no_port_binding_failure(port) [ 549.090389] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 549.090389] env[63345]: raise exception.PortBindingFailed(port_id=port['id']) [ 549.093404] env[63345]: nova.exception.PortBindingFailed: Binding failed for port 3fbb95f1-c342-46f0-9f12-4c6c1d67942d, please check neutron logs for more information. [ 549.093404] env[63345]: Removing descriptor: 16 [ 549.093404] env[63345]: ERROR nova.compute.manager [None req-fed8ccca-f158-4dc4-bc3d-351314b383ad tempest-VolumesAssistedSnapshotsTest-639887845 tempest-VolumesAssistedSnapshotsTest-639887845-project-member] [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 3fbb95f1-c342-46f0-9f12-4c6c1d67942d, please check neutron logs for more information. 
[ 549.093404] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] Traceback (most recent call last): [ 549.093404] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 549.093404] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] yield resources [ 549.093404] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 549.093404] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] self.driver.spawn(context, instance, image_meta, [ 549.093404] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 549.093404] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] self._vmops.spawn(context, instance, image_meta, injected_files, [ 549.093404] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 549.093404] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] vm_ref = self.build_virtual_machine(instance, [ 549.093916] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 549.093916] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] vif_infos = vmwarevif.get_vif_info(self._session, [ 549.093916] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 549.093916] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] for vif in network_info: [ 549.093916] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 549.093916] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] return self._sync_wrapper(fn, *args, **kwargs) [ 549.093916] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 549.093916] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] self.wait() [ 549.093916] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 549.093916] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] self[:] = self._gt.wait() [ 549.093916] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 549.093916] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] return self._exit_event.wait() [ 549.093916] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 549.094693] env[63345]: ERROR 
nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] result = hub.switch() [ 549.094693] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 549.094693] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] return self.greenlet.switch() [ 549.094693] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 549.094693] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] result = function(*args, **kwargs) [ 549.094693] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 549.094693] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] return func(*args, **kwargs) [ 549.094693] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 549.094693] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] raise e [ 549.094693] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 549.094693] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] nwinfo = self.network_api.allocate_for_instance( [ 549.094693] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 549.094693] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] created_port_ids = self._update_ports_for_instance( [ 549.095125] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 549.095125] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] with excutils.save_and_reraise_exception(): [ 549.095125] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 549.095125] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] self.force_reraise() [ 549.095125] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 549.095125] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] raise self.value [ 549.095125] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 549.095125] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] updated_port = self._update_port( [ 549.095125] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 549.095125] 
env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] _ensure_no_port_binding_failure(port) [ 549.095125] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 549.095125] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] raise exception.PortBindingFailed(port_id=port['id']) [ 549.095651] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] nova.exception.PortBindingFailed: Binding failed for port 3fbb95f1-c342-46f0-9f12-4c6c1d67942d, please check neutron logs for more information. [ 549.095651] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] [ 549.095651] env[63345]: INFO nova.compute.manager [None req-fed8ccca-f158-4dc4-bc3d-351314b383ad tempest-VolumesAssistedSnapshotsTest-639887845 tempest-VolumesAssistedSnapshotsTest-639887845-project-member] [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] Terminating instance [ 549.186573] env[63345]: DEBUG nova.network.neutron [req-95634a1e-cf06-429a-af76-2040096dd22c req-4a41d911-0684-486d-a886-9bb0c0ad684a service nova] [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 549.196458] env[63345]: DEBUG nova.network.neutron [-] [instance: 51d6db80-9d1f-4e38-a564-f587474f6294] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 549.246764] env[63345]: DEBUG nova.compute.utils [None req-d45e143f-5f98-4d14-bf6e-59c328ff4693 tempest-ServersWithSpecificFlavorTestJSON-889674138 tempest-ServersWithSpecificFlavorTestJSON-889674138-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 549.254169] env[63345]: DEBUG nova.compute.manager [None req-d45e143f-5f98-4d14-bf6e-59c328ff4693 tempest-ServersWithSpecificFlavorTestJSON-889674138 tempest-ServersWithSpecificFlavorTestJSON-889674138-project-member] [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] Allocating IP information in the background. {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 549.257085] env[63345]: DEBUG nova.network.neutron [None req-d45e143f-5f98-4d14-bf6e-59c328ff4693 tempest-ServersWithSpecificFlavorTestJSON-889674138 tempest-ServersWithSpecificFlavorTestJSON-889674138-project-member] [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 549.343992] env[63345]: DEBUG nova.network.neutron [req-95634a1e-cf06-429a-af76-2040096dd22c req-4a41d911-0684-486d-a886-9bb0c0ad684a service nova] [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 549.452670] env[63345]: INFO nova.compute.manager [-] [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] Took 1.03 seconds to deallocate network for instance. 
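The error text points the operator at Neutron, and the usual first step is to look at the port's binding fields and at why the agent on the target host could not bind it. The helper below is a hypothetical triage sketch, assuming openstacksdk and a clouds.yaml entry named "devstack"; the port ID is the one from the log, and it only works while the port still exists (the network-vif-deleted events later in the log show these ports are removed during cleanup).

    # Hypothetical triage helper, assuming openstacksdk and a cloud entry
    # named "devstack" in clouds.yaml; the port ID is taken from the log.
    import openstack

    def show_binding(port_id="b0c5ad0c-5682-4e54-b4c7-916cd8074721"):
        conn = openstack.connect(cloud="devstack")
        port = conn.network.get_port(port_id)
        # A failed binding shows up as vif_type 'binding_failed'; the
        # binding host and status hint at which agent should have bound it.
        print(port.binding_vif_type, port.binding_host_id, port.status)

    if __name__ == "__main__":
        show_binding()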
[ 549.455775] env[63345]: DEBUG nova.compute.claims [None req-8eb6a8e8-18a6-4bca-bb00-3bbb78f015ed tempest-FloatingIPsAssociationTestJSON-967064990 tempest-FloatingIPsAssociationTestJSON-967064990-project-member] [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] Aborting claim: {{(pid=63345) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 549.455775] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8eb6a8e8-18a6-4bca-bb00-3bbb78f015ed tempest-FloatingIPsAssociationTestJSON-967064990 tempest-FloatingIPsAssociationTestJSON-967064990-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 549.531473] env[63345]: DEBUG oslo_vmware.api [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] Task: {'id': task-1016642, 'name': PowerOffVM_Task, 'duration_secs': 0.122618} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 549.531746] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 549.531961] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 549.532930] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bef1a18-9777-49e0-8c36-3af8777b0f16 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.542115] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 549.542171] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bc1cb2f8-bc04-4b97-bb26-26185025cdf8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.555394] env[63345]: DEBUG nova.policy [None req-d45e143f-5f98-4d14-bf6e-59c328ff4693 tempest-ServersWithSpecificFlavorTestJSON-889674138 tempest-ServersWithSpecificFlavorTestJSON-889674138-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b5e26277e4aa4c9291c5f2aabe14fcee', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7f19113243074f40850dee93c561c5fb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 549.570656] env[63345]: DEBUG 
nova.virt.vmwareapi.vmops [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 549.571213] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 549.571213] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] Deleting the datastore file [datastore2] 5ef55aca-0714-4b34-85f2-b6d53f97c2d0 {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 549.571332] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cf3204b7-5f67-42a7-8d56-6aa48507495c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.578161] env[63345]: DEBUG oslo_vmware.api [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] Waiting for the task: (returnval){ [ 549.578161] env[63345]: value = "task-1016644" [ 549.578161] env[63345]: _type = "Task" [ 549.578161] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 549.586370] env[63345]: DEBUG oslo_vmware.api [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] Task: {'id': task-1016644, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 549.600189] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fed8ccca-f158-4dc4-bc3d-351314b383ad tempest-VolumesAssistedSnapshotsTest-639887845 tempest-VolumesAssistedSnapshotsTest-639887845-project-member] Acquiring lock "refresh_cache-e525b0c2-55f9-43f2-9d4f-faf46c0cd559" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 549.600381] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fed8ccca-f158-4dc4-bc3d-351314b383ad tempest-VolumesAssistedSnapshotsTest-639887845 tempest-VolumesAssistedSnapshotsTest-639887845-project-member] Acquired lock "refresh_cache-e525b0c2-55f9-43f2-9d4f-faf46c0cd559" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 549.600797] env[63345]: DEBUG nova.network.neutron [None req-fed8ccca-f158-4dc4-bc3d-351314b383ad tempest-VolumesAssistedSnapshotsTest-639887845 tempest-VolumesAssistedSnapshotsTest-639887845-project-member] [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 549.702722] env[63345]: INFO nova.compute.manager [-] [instance: 51d6db80-9d1f-4e38-a564-f587474f6294] Took 1.05 seconds to deallocate network for instance. 
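The records around task-1016639 and task-1016641 trace the VMwareVCDriver destroy path: power off the VM, unregister it from the vCenter inventory, then delete the instance directory from the datastore. The function below is a rough illustration of that same call sequence through an oslo.vmware session; the session object and the vm_ref lookup are assumed, and Nova's own vm_util/ds_util helpers wrap these calls differently, so treat this as a sketch rather than the driver's implementation.

    # Rough illustration of the destroy sequence traced above, assuming an
    # already-created oslo.vmware VMwareAPISession ("session") and a VM
    # managed-object reference ("vm_ref"); this is not the Nova code itself.
    def destroy_vm(session, vm_ref):
        # Power off the VM and wait for the task (PowerOffVM_Task above).
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        session.wait_for_task(task)
        # Remove the VM from the vCenter inventory; UnregisterVM is not a task.
        session.invoke_api(session.vim, 'UnregisterVM', vm_ref)
        # Nova then removes the instance directory from the datastore via
        # FileManager.DeleteDatastoreFile_Task, as with task-1016641 above.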
[ 549.755804] env[63345]: DEBUG nova.compute.manager [None req-d45e143f-5f98-4d14-bf6e-59c328ff4693 tempest-ServersWithSpecificFlavorTestJSON-889674138 tempest-ServersWithSpecificFlavorTestJSON-889674138-project-member] [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] Start building block device mappings for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 549.849026] env[63345]: DEBUG oslo_concurrency.lockutils [req-95634a1e-cf06-429a-af76-2040096dd22c req-4a41d911-0684-486d-a886-9bb0c0ad684a service nova] Releasing lock "refresh_cache-64c4c933-2b89-409a-9b4c-eccc7f481b67" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 549.849168] env[63345]: DEBUG nova.compute.manager [req-95634a1e-cf06-429a-af76-2040096dd22c req-4a41d911-0684-486d-a886-9bb0c0ad684a service nova] [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] Received event network-vif-deleted-b0c5ad0c-5682-4e54-b4c7-916cd8074721 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 549.868126] env[63345]: INFO nova.scheduler.client.report [None req-5ea571cd-8f6b-4257-8cc6-0ee9e2154c2b tempest-ServersTestFqdnHostnames-427992595 tempest-ServersTestFqdnHostnames-427992595-project-member] Deleted allocations for instance c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65 [ 550.091930] env[63345]: DEBUG oslo_vmware.api [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] Task: {'id': task-1016644, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.245567} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 550.092258] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 550.092341] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 550.092475] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 550.149141] env[63345]: DEBUG nova.network.neutron [None req-fed8ccca-f158-4dc4-bc3d-351314b383ad tempest-VolumesAssistedSnapshotsTest-639887845 tempest-VolumesAssistedSnapshotsTest-639887845-project-member] [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 550.199997] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c6794ce-2918-4852-b903-4f8fe1c4a6d7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.209396] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f49c1f0e-e78c-4363-b0e1-ae646c7f759c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.213629] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0ec822aa-b63d-49c1-abe1-705df47e5c43 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 550.243995] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d04e365f-d65a-4e6f-81f7-759dcc272ee5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.256930] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92976ecd-9218-44c6-a0a0-750591c4f6c0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.276027] env[63345]: DEBUG nova.compute.provider_tree [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 550.380444] env[63345]: DEBUG nova.network.neutron [None req-fed8ccca-f158-4dc4-bc3d-351314b383ad tempest-VolumesAssistedSnapshotsTest-639887845 tempest-VolumesAssistedSnapshotsTest-639887845-project-member] [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 550.381844] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5ea571cd-8f6b-4257-8cc6-0ee9e2154c2b tempest-ServersTestFqdnHostnames-427992595 tempest-ServersTestFqdnHostnames-427992595-project-member] Lock "c37b5d05-14d7-4e9b-87f6-9fdcc4a1ad65" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.465s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 550.503641] env[63345]: DEBUG oslo_concurrency.lockutils [None req-28a9f8ee-561e-42c1-a81b-2f1cf60def7e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquiring lock "35a5bd72-403b-467b-ad52-1a1bf4958dbb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 550.503641] env[63345]: DEBUG oslo_concurrency.lockutils [None req-28a9f8ee-561e-42c1-a81b-2f1cf60def7e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Lock "35a5bd72-403b-467b-ad52-1a1bf4958dbb" acquired 
by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 550.766554] env[63345]: DEBUG nova.compute.manager [None req-d45e143f-5f98-4d14-bf6e-59c328ff4693 tempest-ServersWithSpecificFlavorTestJSON-889674138 tempest-ServersWithSpecificFlavorTestJSON-889674138-project-member] [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] Start spawning the instance on the hypervisor. {{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 550.771163] env[63345]: DEBUG nova.network.neutron [None req-d45e143f-5f98-4d14-bf6e-59c328ff4693 tempest-ServersWithSpecificFlavorTestJSON-889674138 tempest-ServersWithSpecificFlavorTestJSON-889674138-project-member] [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] Successfully created port: 29fbfacc-22e8-4e75-8ec6-1ffc12ff0bcc {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 550.781605] env[63345]: DEBUG nova.scheduler.client.report [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 550.800530] env[63345]: DEBUG nova.virt.hardware [None req-d45e143f-5f98-4d14-bf6e-59c328ff4693 tempest-ServersWithSpecificFlavorTestJSON-889674138 tempest-ServersWithSpecificFlavorTestJSON-889674138-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:34:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='114207718',id=26,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_0-1773289289',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 550.800530] env[63345]: DEBUG nova.virt.hardware [None req-d45e143f-5f98-4d14-bf6e-59c328ff4693 tempest-ServersWithSpecificFlavorTestJSON-889674138 tempest-ServersWithSpecificFlavorTestJSON-889674138-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 550.800530] env[63345]: DEBUG nova.virt.hardware [None req-d45e143f-5f98-4d14-bf6e-59c328ff4693 tempest-ServersWithSpecificFlavorTestJSON-889674138 tempest-ServersWithSpecificFlavorTestJSON-889674138-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:352}} [ 550.800717] env[63345]: DEBUG nova.virt.hardware [None req-d45e143f-5f98-4d14-bf6e-59c328ff4693 tempest-ServersWithSpecificFlavorTestJSON-889674138 tempest-ServersWithSpecificFlavorTestJSON-889674138-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 550.800717] env[63345]: DEBUG nova.virt.hardware [None req-d45e143f-5f98-4d14-bf6e-59c328ff4693 tempest-ServersWithSpecificFlavorTestJSON-889674138 tempest-ServersWithSpecificFlavorTestJSON-889674138-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 550.800717] env[63345]: DEBUG nova.virt.hardware [None req-d45e143f-5f98-4d14-bf6e-59c328ff4693 tempest-ServersWithSpecificFlavorTestJSON-889674138 tempest-ServersWithSpecificFlavorTestJSON-889674138-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 550.800717] env[63345]: DEBUG nova.virt.hardware [None req-d45e143f-5f98-4d14-bf6e-59c328ff4693 tempest-ServersWithSpecificFlavorTestJSON-889674138 tempest-ServersWithSpecificFlavorTestJSON-889674138-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 550.800717] env[63345]: DEBUG nova.virt.hardware [None req-d45e143f-5f98-4d14-bf6e-59c328ff4693 tempest-ServersWithSpecificFlavorTestJSON-889674138 tempest-ServersWithSpecificFlavorTestJSON-889674138-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 550.800852] env[63345]: DEBUG nova.virt.hardware [None req-d45e143f-5f98-4d14-bf6e-59c328ff4693 tempest-ServersWithSpecificFlavorTestJSON-889674138 tempest-ServersWithSpecificFlavorTestJSON-889674138-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 550.800852] env[63345]: DEBUG nova.virt.hardware [None req-d45e143f-5f98-4d14-bf6e-59c328ff4693 tempest-ServersWithSpecificFlavorTestJSON-889674138 tempest-ServersWithSpecificFlavorTestJSON-889674138-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 550.800852] env[63345]: DEBUG nova.virt.hardware [None req-d45e143f-5f98-4d14-bf6e-59c328ff4693 tempest-ServersWithSpecificFlavorTestJSON-889674138 tempest-ServersWithSpecificFlavorTestJSON-889674138-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 550.800852] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d96da04b-8558-493d-991e-3b6f40d0709b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.812312] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61ed70cf-737a-4d01-9150-b1f6f52ccae7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.883525] env[63345]: DEBUG oslo_concurrency.lockutils [None 
req-fed8ccca-f158-4dc4-bc3d-351314b383ad tempest-VolumesAssistedSnapshotsTest-639887845 tempest-VolumesAssistedSnapshotsTest-639887845-project-member] Releasing lock "refresh_cache-e525b0c2-55f9-43f2-9d4f-faf46c0cd559" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 550.883965] env[63345]: DEBUG nova.compute.manager [None req-fed8ccca-f158-4dc4-bc3d-351314b383ad tempest-VolumesAssistedSnapshotsTest-639887845 tempest-VolumesAssistedSnapshotsTest-639887845-project-member] [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] Start destroying the instance on the hypervisor. {{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 550.884240] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-fed8ccca-f158-4dc4-bc3d-351314b383ad tempest-VolumesAssistedSnapshotsTest-639887845 tempest-VolumesAssistedSnapshotsTest-639887845-project-member] [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 550.887724] env[63345]: DEBUG nova.compute.manager [None req-8a6c5bc0-caa6-4dbe-a700-0852231dd95b tempest-ServerExternalEventsTest-203609284 tempest-ServerExternalEventsTest-203609284-project-member] [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 550.887828] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-48faf50d-26ab-4dc3-87bc-33b0bffb1d93 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.904431] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb526c05-fd35-4ec9-b133-727d46eaaff1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.936870] env[63345]: WARNING nova.virt.vmwareapi.vmops [None req-fed8ccca-f158-4dc4-bc3d-351314b383ad tempest-VolumesAssistedSnapshotsTest-639887845 tempest-VolumesAssistedSnapshotsTest-639887845-project-member] [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e525b0c2-55f9-43f2-9d4f-faf46c0cd559 could not be found. [ 550.936870] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-fed8ccca-f158-4dc4-bc3d-351314b383ad tempest-VolumesAssistedSnapshotsTest-639887845 tempest-VolumesAssistedSnapshotsTest-639887845-project-member] [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 550.936963] env[63345]: INFO nova.compute.manager [None req-fed8ccca-f158-4dc4-bc3d-351314b383ad tempest-VolumesAssistedSnapshotsTest-639887845 tempest-VolumesAssistedSnapshotsTest-639887845-project-member] [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] Took 0.05 seconds to destroy the instance on the hypervisor. [ 550.937191] env[63345]: DEBUG oslo.service.loopingcall [None req-fed8ccca-f158-4dc4-bc3d-351314b383ad tempest-VolumesAssistedSnapshotsTest-639887845 tempest-VolumesAssistedSnapshotsTest-639887845-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 550.937500] env[63345]: DEBUG nova.compute.manager [-] [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 550.937500] env[63345]: DEBUG nova.network.neutron [-] [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 550.960925] env[63345]: DEBUG nova.network.neutron [-] [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 551.147016] env[63345]: DEBUG nova.virt.hardware [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 551.147502] env[63345]: DEBUG nova.virt.hardware [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 551.147502] env[63345]: DEBUG nova.virt.hardware [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 551.148888] env[63345]: DEBUG nova.virt.hardware [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 551.148888] env[63345]: DEBUG nova.virt.hardware [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 551.148888] env[63345]: DEBUG nova.virt.hardware [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 551.148888] env[63345]: DEBUG nova.virt.hardware [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 
tempest-ServersAdmin275Test-500933732-project-admin] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 551.148888] env[63345]: DEBUG nova.virt.hardware [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 551.149361] env[63345]: DEBUG nova.virt.hardware [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 551.149361] env[63345]: DEBUG nova.virt.hardware [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 551.149361] env[63345]: DEBUG nova.virt.hardware [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 551.150259] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd5388ac-d095-46d0-b83d-10ae779a08be {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.158798] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f98a52b6-c658-489d-9487-4061315e7fef {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.175527] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Instance VIF info [] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 551.182346] env[63345]: DEBUG oslo.service.loopingcall [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 551.182346] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 551.182543] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-326ff30c-90a1-45ba-815c-ef2c3bd4ff48 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.208640] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 551.208640] env[63345]: value = "task-1016645" [ 551.208640] env[63345]: _type = "Task" [ 551.208640] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 551.215052] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016645, 'name': CreateVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 551.289022] env[63345]: DEBUG oslo_concurrency.lockutils [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.551s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 551.289022] env[63345]: DEBUG nova.compute.manager [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 175ede99-48e4-43dc-b563-140f42244c97] Start building networks asynchronously for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 551.290708] env[63345]: DEBUG oslo_concurrency.lockutils [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.452s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 551.292629] env[63345]: INFO nova.compute.claims [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 551.422020] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8a6c5bc0-caa6-4dbe-a700-0852231dd95b tempest-ServerExternalEventsTest-203609284 tempest-ServerExternalEventsTest-203609284-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 551.465198] env[63345]: DEBUG nova.network.neutron [-] [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 551.634335] env[63345]: DEBUG nova.compute.manager [req-262abfdf-27fd-47bb-897a-abccfd512cdb req-8da8fcb7-6e3c-4786-aa71-ce3d7676d2fb service nova] [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] Received event network-changed-3fbb95f1-c342-46f0-9f12-4c6c1d67942d {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 551.634543] env[63345]: DEBUG nova.compute.manager [req-262abfdf-27fd-47bb-897a-abccfd512cdb req-8da8fcb7-6e3c-4786-aa71-ce3d7676d2fb service nova] [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] Refreshing instance network info cache due to event network-changed-3fbb95f1-c342-46f0-9f12-4c6c1d67942d. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 551.634747] env[63345]: DEBUG oslo_concurrency.lockutils [req-262abfdf-27fd-47bb-897a-abccfd512cdb req-8da8fcb7-6e3c-4786-aa71-ce3d7676d2fb service nova] Acquiring lock "refresh_cache-e525b0c2-55f9-43f2-9d4f-faf46c0cd559" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 551.634908] env[63345]: DEBUG oslo_concurrency.lockutils [req-262abfdf-27fd-47bb-897a-abccfd512cdb req-8da8fcb7-6e3c-4786-aa71-ce3d7676d2fb service nova] Acquired lock "refresh_cache-e525b0c2-55f9-43f2-9d4f-faf46c0cd559" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 551.635826] env[63345]: DEBUG nova.network.neutron [req-262abfdf-27fd-47bb-897a-abccfd512cdb req-8da8fcb7-6e3c-4786-aa71-ce3d7676d2fb service nova] [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] Refreshing network info cache for port 3fbb95f1-c342-46f0-9f12-4c6c1d67942d {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 551.717399] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016645, 'name': CreateVM_Task, 'duration_secs': 0.297043} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 551.717601] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 551.719290] env[63345]: DEBUG oslo_concurrency.lockutils [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 551.719290] env[63345]: DEBUG oslo_concurrency.lockutils [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 551.719411] env[63345]: DEBUG oslo_concurrency.lockutils [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 551.719603] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ee2db59f-8c3f-44e7-ae7c-225a8ec6a3a9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.725224] env[63345]: DEBUG oslo_vmware.api [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] Waiting for the task: (returnval){ [ 551.725224] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]5214c27d-16b1-dfc2-5d0d-3fa5170d7810" [ 551.725224] env[63345]: _type = "Task" [ 551.725224] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 551.735070] env[63345]: DEBUG oslo_vmware.api [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5214c27d-16b1-dfc2-5d0d-3fa5170d7810, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 551.797239] env[63345]: DEBUG nova.compute.utils [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 551.805596] env[63345]: DEBUG nova.compute.manager [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 175ede99-48e4-43dc-b563-140f42244c97] Allocating IP information in the background. {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 551.805782] env[63345]: DEBUG nova.network.neutron [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 175ede99-48e4-43dc-b563-140f42244c97] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 551.968291] env[63345]: INFO nova.compute.manager [-] [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] Took 1.03 seconds to deallocate network for instance. [ 551.970936] env[63345]: DEBUG nova.compute.claims [None req-fed8ccca-f158-4dc4-bc3d-351314b383ad tempest-VolumesAssistedSnapshotsTest-639887845 tempest-VolumesAssistedSnapshotsTest-639887845-project-member] [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] Aborting claim: {{(pid=63345) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 551.971197] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fed8ccca-f158-4dc4-bc3d-351314b383ad tempest-VolumesAssistedSnapshotsTest-639887845 tempest-VolumesAssistedSnapshotsTest-639887845-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 551.973639] env[63345]: DEBUG nova.policy [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e463e6e279284d979d49753ff4d07572', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ea42057a148e453d869b4af82bdb21bb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 552.082340] env[63345]: DEBUG oslo_concurrency.lockutils [None req-40c46a23-51d1-4974-99d7-63d301a98173 tempest-ServerActionsTestJSON-1881851479 tempest-ServerActionsTestJSON-1881851479-project-member] Acquiring lock "fb2cdca8-441c-4edb-be11-6b89c19b3cad" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 552.082607] env[63345]: DEBUG oslo_concurrency.lockutils [None req-40c46a23-51d1-4974-99d7-63d301a98173 tempest-ServerActionsTestJSON-1881851479 tempest-ServerActionsTestJSON-1881851479-project-member] Lock "fb2cdca8-441c-4edb-be11-6b89c19b3cad" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 552.238519] env[63345]: DEBUG oslo_vmware.api [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5214c27d-16b1-dfc2-5d0d-3fa5170d7810, 'name': SearchDatastore_Task, 'duration_secs': 0.014732} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 552.238813] env[63345]: DEBUG oslo_concurrency.lockutils [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 552.239016] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 552.239261] env[63345]: DEBUG oslo_concurrency.lockutils [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 552.239420] env[63345]: DEBUG oslo_concurrency.lockutils [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 552.239612] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 552.239879] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a405b1a8-a313-4786-86ad-812ceed31ae1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.248984] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 
tempest-ServersAdmin275Test-500933732-project-admin] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 552.249183] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] Folder [datastore2] devstack-image-cache_base created. {{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 552.249880] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-417c638c-2d74-44d7-9b20-4ff289b6f022 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.254647] env[63345]: DEBUG oslo_vmware.api [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] Waiting for the task: (returnval){ [ 552.254647] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52ee52a2-1a43-ecb3-89a2-f4cc801fb8e6" [ 552.254647] env[63345]: _type = "Task" [ 552.254647] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 552.262359] env[63345]: DEBUG oslo_vmware.api [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52ee52a2-1a43-ecb3-89a2-f4cc801fb8e6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 552.281950] env[63345]: DEBUG nova.network.neutron [req-262abfdf-27fd-47bb-897a-abccfd512cdb req-8da8fcb7-6e3c-4786-aa71-ce3d7676d2fb service nova] [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 552.306701] env[63345]: DEBUG nova.compute.manager [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 175ede99-48e4-43dc-b563-140f42244c97] Start building block device mappings for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 552.339350] env[63345]: DEBUG nova.network.neutron [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 175ede99-48e4-43dc-b563-140f42244c97] Successfully created port: 6afb7501-c3fd-4ca2-b6a8-f228b9a81260 {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 552.497231] env[63345]: DEBUG nova.network.neutron [req-262abfdf-27fd-47bb-897a-abccfd512cdb req-8da8fcb7-6e3c-4786-aa71-ce3d7676d2fb service nova] [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 552.664162] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edf09aa3-9f08-4bbc-ab41-0fa9fbbdb21e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.676112] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-765a3628-4a44-43e3-b088-a3847b8a2735 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.721255] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c037e60a-a4a9-4743-aefc-b235163dc36a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.730298] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da62d2e1-440d-46bb-b95a-936364eecd4c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.744888] env[63345]: DEBUG nova.compute.provider_tree [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 552.769640] env[63345]: DEBUG oslo_vmware.api [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52ee52a2-1a43-ecb3-89a2-f4cc801fb8e6, 'name': SearchDatastore_Task, 'duration_secs': 0.007318} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 552.769640] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9b053403-1daa-4bbb-94b4-348b34062a76 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.777165] env[63345]: DEBUG oslo_vmware.api [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] Waiting for the task: (returnval){ [ 552.777165] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52f72b30-1ba3-72b6-7906-31478830ca3b" [ 552.777165] env[63345]: _type = "Task" [ 552.777165] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 552.787011] env[63345]: DEBUG oslo_vmware.api [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52f72b30-1ba3-72b6-7906-31478830ca3b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 552.967800] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a2c11686-f3e2-427b-a111-6c510c529d42 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] Acquiring lock "56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 552.968052] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a2c11686-f3e2-427b-a111-6c510c529d42 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] Lock "56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 553.005347] env[63345]: DEBUG oslo_concurrency.lockutils [req-262abfdf-27fd-47bb-897a-abccfd512cdb req-8da8fcb7-6e3c-4786-aa71-ce3d7676d2fb service nova] Releasing lock "refresh_cache-e525b0c2-55f9-43f2-9d4f-faf46c0cd559" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 553.005347] env[63345]: DEBUG nova.compute.manager [req-262abfdf-27fd-47bb-897a-abccfd512cdb req-8da8fcb7-6e3c-4786-aa71-ce3d7676d2fb service nova] [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] Received event network-vif-deleted-3fbb95f1-c342-46f0-9f12-4c6c1d67942d {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 553.251668] env[63345]: DEBUG nova.scheduler.client.report [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 553.288134] env[63345]: DEBUG oslo_vmware.api [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52f72b30-1ba3-72b6-7906-31478830ca3b, 'name': SearchDatastore_Task, 'duration_secs': 0.009415} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 553.290354] env[63345]: DEBUG oslo_concurrency.lockutils [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 553.290354] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 5ef55aca-0714-4b34-85f2-b6d53f97c2d0/5ef55aca-0714-4b34-85f2-b6d53f97c2d0.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 553.290354] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ed68f7b7-f0f4-405a-b8cf-d51f4c951067 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.297681] env[63345]: DEBUG oslo_vmware.api [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] Waiting for the task: (returnval){ [ 553.297681] env[63345]: value = "task-1016650" [ 553.297681] env[63345]: _type = "Task" [ 553.297681] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 553.306599] env[63345]: DEBUG oslo_vmware.api [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] Task: {'id': task-1016650, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 553.320453] env[63345]: DEBUG nova.compute.manager [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 175ede99-48e4-43dc-b563-140f42244c97] Start spawning the instance on the hypervisor. 
{{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 553.351032] env[63345]: DEBUG nova.virt.hardware [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 553.351326] env[63345]: DEBUG nova.virt.hardware [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 553.353000] env[63345]: DEBUG nova.virt.hardware [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 553.353000] env[63345]: DEBUG nova.virt.hardware [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 553.353000] env[63345]: DEBUG nova.virt.hardware [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 553.355868] env[63345]: DEBUG nova.virt.hardware [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 553.355868] env[63345]: DEBUG nova.virt.hardware [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 553.355868] env[63345]: DEBUG nova.virt.hardware [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 553.355868] env[63345]: DEBUG nova.virt.hardware [None 
req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 553.355868] env[63345]: DEBUG nova.virt.hardware [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 553.356047] env[63345]: DEBUG nova.virt.hardware [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 553.356047] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89473514-8345-406e-9fa3-10eb322f36f6 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.365244] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-736df926-7bf2-4536-bcb0-791995935323 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.450379] env[63345]: ERROR nova.compute.manager [None req-d45e143f-5f98-4d14-bf6e-59c328ff4693 tempest-ServersWithSpecificFlavorTestJSON-889674138 tempest-ServersWithSpecificFlavorTestJSON-889674138-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 29fbfacc-22e8-4e75-8ec6-1ffc12ff0bcc, please check neutron logs for more information. 
[ 553.450379] env[63345]: ERROR nova.compute.manager Traceback (most recent call last): [ 553.450379] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 553.450379] env[63345]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 553.450379] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 553.450379] env[63345]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 553.450379] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 553.450379] env[63345]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 553.450379] env[63345]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 553.450379] env[63345]: ERROR nova.compute.manager self.force_reraise() [ 553.450379] env[63345]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 553.450379] env[63345]: ERROR nova.compute.manager raise self.value [ 553.450379] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 553.450379] env[63345]: ERROR nova.compute.manager updated_port = self._update_port( [ 553.450379] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 553.450379] env[63345]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 553.450861] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 553.450861] env[63345]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 553.450861] env[63345]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 29fbfacc-22e8-4e75-8ec6-1ffc12ff0bcc, please check neutron logs for more information. 
[ 553.450861] env[63345]: ERROR nova.compute.manager [ 553.450861] env[63345]: Traceback (most recent call last): [ 553.450861] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 553.450861] env[63345]: listener.cb(fileno) [ 553.450861] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 553.450861] env[63345]: result = function(*args, **kwargs) [ 553.450861] env[63345]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 553.450861] env[63345]: return func(*args, **kwargs) [ 553.450861] env[63345]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 553.450861] env[63345]: raise e [ 553.450861] env[63345]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 553.450861] env[63345]: nwinfo = self.network_api.allocate_for_instance( [ 553.450861] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 553.450861] env[63345]: created_port_ids = self._update_ports_for_instance( [ 553.450861] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 553.450861] env[63345]: with excutils.save_and_reraise_exception(): [ 553.450861] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 553.450861] env[63345]: self.force_reraise() [ 553.450861] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 553.450861] env[63345]: raise self.value [ 553.450861] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 553.450861] env[63345]: updated_port = self._update_port( [ 553.450861] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 553.450861] env[63345]: _ensure_no_port_binding_failure(port) [ 553.450861] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 553.450861] env[63345]: raise exception.PortBindingFailed(port_id=port['id']) [ 553.451663] env[63345]: nova.exception.PortBindingFailed: Binding failed for port 29fbfacc-22e8-4e75-8ec6-1ffc12ff0bcc, please check neutron logs for more information. [ 553.451663] env[63345]: Removing descriptor: 15 [ 553.451663] env[63345]: ERROR nova.compute.manager [None req-d45e143f-5f98-4d14-bf6e-59c328ff4693 tempest-ServersWithSpecificFlavorTestJSON-889674138 tempest-ServersWithSpecificFlavorTestJSON-889674138-project-member] [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 29fbfacc-22e8-4e75-8ec6-1ffc12ff0bcc, please check neutron logs for more information. 
[ 553.451663] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] Traceback (most recent call last): [ 553.451663] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 553.451663] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] yield resources [ 553.451663] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 553.451663] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] self.driver.spawn(context, instance, image_meta, [ 553.451663] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 553.451663] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 553.451663] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 553.451663] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] vm_ref = self.build_virtual_machine(instance, [ 553.452054] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 553.452054] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] vif_infos = vmwarevif.get_vif_info(self._session, [ 553.452054] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 553.452054] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] for vif in network_info: [ 553.452054] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 553.452054] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] return self._sync_wrapper(fn, *args, **kwargs) [ 553.452054] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 553.452054] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] self.wait() [ 553.452054] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 553.452054] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] self[:] = self._gt.wait() [ 553.452054] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 553.452054] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] return self._exit_event.wait() [ 553.452054] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 553.452462] env[63345]: ERROR 
nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] result = hub.switch() [ 553.452462] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 553.452462] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] return self.greenlet.switch() [ 553.452462] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 553.452462] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] result = function(*args, **kwargs) [ 553.452462] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 553.452462] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] return func(*args, **kwargs) [ 553.452462] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 553.452462] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] raise e [ 553.452462] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 553.452462] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] nwinfo = self.network_api.allocate_for_instance( [ 553.452462] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 553.452462] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] created_port_ids = self._update_ports_for_instance( [ 553.452816] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 553.452816] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] with excutils.save_and_reraise_exception(): [ 553.452816] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 553.452816] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] self.force_reraise() [ 553.452816] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 553.452816] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] raise self.value [ 553.452816] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 553.452816] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] updated_port = self._update_port( [ 553.452816] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 553.452816] 
env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] _ensure_no_port_binding_failure(port) [ 553.452816] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 553.452816] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] raise exception.PortBindingFailed(port_id=port['id']) [ 553.453222] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] nova.exception.PortBindingFailed: Binding failed for port 29fbfacc-22e8-4e75-8ec6-1ffc12ff0bcc, please check neutron logs for more information. [ 553.453222] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] [ 553.453222] env[63345]: INFO nova.compute.manager [None req-d45e143f-5f98-4d14-bf6e-59c328ff4693 tempest-ServersWithSpecificFlavorTestJSON-889674138 tempest-ServersWithSpecificFlavorTestJSON-889674138-project-member] [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] Terminating instance [ 553.757332] env[63345]: DEBUG oslo_concurrency.lockutils [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.466s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 553.759387] env[63345]: DEBUG nova.compute.manager [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] Start building networks asynchronously for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 553.760567] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 13.741s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 553.813157] env[63345]: DEBUG oslo_vmware.api [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] Task: {'id': task-1016650, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 553.965330] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d45e143f-5f98-4d14-bf6e-59c328ff4693 tempest-ServersWithSpecificFlavorTestJSON-889674138 tempest-ServersWithSpecificFlavorTestJSON-889674138-project-member] Acquiring lock "refresh_cache-d467124f-0b2b-4108-90d1-40f149e55ff0" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 553.965912] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d45e143f-5f98-4d14-bf6e-59c328ff4693 tempest-ServersWithSpecificFlavorTestJSON-889674138 tempest-ServersWithSpecificFlavorTestJSON-889674138-project-member] Acquired lock "refresh_cache-d467124f-0b2b-4108-90d1-40f149e55ff0" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 553.966282] env[63345]: DEBUG nova.network.neutron [None req-d45e143f-5f98-4d14-bf6e-59c328ff4693 tempest-ServersWithSpecificFlavorTestJSON-889674138 tempest-ServersWithSpecificFlavorTestJSON-889674138-project-member] [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 554.109065] env[63345]: ERROR nova.compute.manager [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 6afb7501-c3fd-4ca2-b6a8-f228b9a81260, please check neutron logs for more information. [ 554.109065] env[63345]: ERROR nova.compute.manager Traceback (most recent call last): [ 554.109065] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 554.109065] env[63345]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 554.109065] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 554.109065] env[63345]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 554.109065] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 554.109065] env[63345]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 554.109065] env[63345]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 554.109065] env[63345]: ERROR nova.compute.manager self.force_reraise() [ 554.109065] env[63345]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 554.109065] env[63345]: ERROR nova.compute.manager raise self.value [ 554.109065] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 554.109065] env[63345]: ERROR nova.compute.manager updated_port = self._update_port( [ 554.109065] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 554.109065] env[63345]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 554.109544] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
554.109544] env[63345]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 554.109544] env[63345]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 6afb7501-c3fd-4ca2-b6a8-f228b9a81260, please check neutron logs for more information. [ 554.109544] env[63345]: ERROR nova.compute.manager [ 554.109544] env[63345]: Traceback (most recent call last): [ 554.109544] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 554.109544] env[63345]: listener.cb(fileno) [ 554.109544] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 554.109544] env[63345]: result = function(*args, **kwargs) [ 554.109544] env[63345]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 554.109544] env[63345]: return func(*args, **kwargs) [ 554.109544] env[63345]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 554.109544] env[63345]: raise e [ 554.109544] env[63345]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 554.109544] env[63345]: nwinfo = self.network_api.allocate_for_instance( [ 554.109544] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 554.109544] env[63345]: created_port_ids = self._update_ports_for_instance( [ 554.109544] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 554.109544] env[63345]: with excutils.save_and_reraise_exception(): [ 554.109544] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 554.109544] env[63345]: self.force_reraise() [ 554.109544] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 554.109544] env[63345]: raise self.value [ 554.109544] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 554.109544] env[63345]: updated_port = self._update_port( [ 554.109544] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 554.109544] env[63345]: _ensure_no_port_binding_failure(port) [ 554.109544] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 554.109544] env[63345]: raise exception.PortBindingFailed(port_id=port['id']) [ 554.110350] env[63345]: nova.exception.PortBindingFailed: Binding failed for port 6afb7501-c3fd-4ca2-b6a8-f228b9a81260, please check neutron logs for more information. [ 554.110350] env[63345]: Removing descriptor: 16 [ 554.110350] env[63345]: ERROR nova.compute.manager [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 175ede99-48e4-43dc-b563-140f42244c97] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 6afb7501-c3fd-4ca2-b6a8-f228b9a81260, please check neutron logs for more information. 
[ 554.110350] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] Traceback (most recent call last): [ 554.110350] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 554.110350] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] yield resources [ 554.110350] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 554.110350] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] self.driver.spawn(context, instance, image_meta, [ 554.110350] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 554.110350] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] self._vmops.spawn(context, instance, image_meta, injected_files, [ 554.110350] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 554.110350] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] vm_ref = self.build_virtual_machine(instance, [ 554.110680] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 554.110680] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] vif_infos = vmwarevif.get_vif_info(self._session, [ 554.110680] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 554.110680] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] for vif in network_info: [ 554.110680] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 554.110680] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] return self._sync_wrapper(fn, *args, **kwargs) [ 554.110680] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 554.110680] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] self.wait() [ 554.110680] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 554.110680] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] self[:] = self._gt.wait() [ 554.110680] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 554.110680] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] return self._exit_event.wait() [ 554.110680] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 554.111130] env[63345]: ERROR 
nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] result = hub.switch() [ 554.111130] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 554.111130] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] return self.greenlet.switch() [ 554.111130] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 554.111130] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] result = function(*args, **kwargs) [ 554.111130] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 554.111130] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] return func(*args, **kwargs) [ 554.111130] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 554.111130] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] raise e [ 554.111130] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 554.111130] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] nwinfo = self.network_api.allocate_for_instance( [ 554.111130] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 554.111130] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] created_port_ids = self._update_ports_for_instance( [ 554.111603] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 554.111603] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] with excutils.save_and_reraise_exception(): [ 554.111603] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 554.111603] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] self.force_reraise() [ 554.111603] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 554.111603] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] raise self.value [ 554.111603] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 554.111603] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] updated_port = self._update_port( [ 554.111603] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 554.111603] 
env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] _ensure_no_port_binding_failure(port) [ 554.111603] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 554.111603] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] raise exception.PortBindingFailed(port_id=port['id']) [ 554.112138] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] nova.exception.PortBindingFailed: Binding failed for port 6afb7501-c3fd-4ca2-b6a8-f228b9a81260, please check neutron logs for more information. [ 554.112138] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] [ 554.112138] env[63345]: INFO nova.compute.manager [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 175ede99-48e4-43dc-b563-140f42244c97] Terminating instance [ 554.127622] env[63345]: DEBUG oslo_concurrency.lockutils [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Acquiring lock "ee31689b-bf0b-4737-86c7-5451c763e603" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 554.127622] env[63345]: DEBUG oslo_concurrency.lockutils [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Lock "ee31689b-bf0b-4737-86c7-5451c763e603" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 554.173800] env[63345]: DEBUG oslo_concurrency.lockutils [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Acquiring lock "28caa5f5-141a-4ef9-abb3-33a1973d99cf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 554.174137] env[63345]: DEBUG oslo_concurrency.lockutils [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Lock "28caa5f5-141a-4ef9-abb3-33a1973d99cf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 554.228908] env[63345]: DEBUG oslo_concurrency.lockutils [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Acquiring lock "b4a7d6dd-98dc-49d8-b344-1878cd5a3f51" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 554.229510] env[63345]: DEBUG oslo_concurrency.lockutils [None 
req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Lock "b4a7d6dd-98dc-49d8-b344-1878cd5a3f51" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 554.264895] env[63345]: DEBUG nova.compute.utils [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 554.273333] env[63345]: DEBUG nova.compute.manager [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] Allocating IP information in the background. {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 554.273520] env[63345]: DEBUG nova.network.neutron [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 554.311761] env[63345]: DEBUG oslo_vmware.api [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] Task: {'id': task-1016650, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.515153} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 554.312408] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 5ef55aca-0714-4b34-85f2-b6d53f97c2d0/5ef55aca-0714-4b34-85f2-b6d53f97c2d0.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 554.312408] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 554.312630] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a08c6697-7cac-4045-88e9-92bae06a902e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.319814] env[63345]: DEBUG oslo_vmware.api [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] Waiting for the task: (returnval){ [ 554.319814] env[63345]: value = "task-1016652" [ 554.319814] env[63345]: _type = "Task" [ 554.319814] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 554.330864] env[63345]: DEBUG oslo_vmware.api [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] Task: {'id': task-1016652, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 554.349265] env[63345]: DEBUG nova.policy [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e463e6e279284d979d49753ff4d07572', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ea42057a148e453d869b4af82bdb21bb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 554.493817] env[63345]: DEBUG nova.network.neutron [None req-d45e143f-5f98-4d14-bf6e-59c328ff4693 tempest-ServersWithSpecificFlavorTestJSON-889674138 tempest-ServersWithSpecificFlavorTestJSON-889674138-project-member] [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 554.617614] env[63345]: DEBUG oslo_concurrency.lockutils [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Acquiring lock "refresh_cache-175ede99-48e4-43dc-b563-140f42244c97" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 554.618825] env[63345]: DEBUG oslo_concurrency.lockutils [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Acquired lock "refresh_cache-175ede99-48e4-43dc-b563-140f42244c97" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 554.618825] env[63345]: DEBUG nova.network.neutron [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 175ede99-48e4-43dc-b563-140f42244c97] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 554.677696] env[63345]: DEBUG nova.network.neutron [None req-d45e143f-5f98-4d14-bf6e-59c328ff4693 tempest-ServersWithSpecificFlavorTestJSON-889674138 tempest-ServersWithSpecificFlavorTestJSON-889674138-project-member] [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 554.679965] env[63345]: DEBUG nova.compute.manager [req-1b418d6c-e8c4-4a12-9961-19a14231063f req-a72d9a3c-4023-4c92-828a-4066fc59d717 service nova] [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] Received event network-changed-29fbfacc-22e8-4e75-8ec6-1ffc12ff0bcc {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 554.680190] env[63345]: DEBUG nova.compute.manager 
[req-1b418d6c-e8c4-4a12-9961-19a14231063f req-a72d9a3c-4023-4c92-828a-4066fc59d717 service nova] [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] Refreshing instance network info cache due to event network-changed-29fbfacc-22e8-4e75-8ec6-1ffc12ff0bcc. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 554.680386] env[63345]: DEBUG oslo_concurrency.lockutils [req-1b418d6c-e8c4-4a12-9961-19a14231063f req-a72d9a3c-4023-4c92-828a-4066fc59d717 service nova] Acquiring lock "refresh_cache-d467124f-0b2b-4108-90d1-40f149e55ff0" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 554.779339] env[63345]: DEBUG nova.compute.manager [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] Start building block device mappings for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 554.813347] env[63345]: WARNING nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 51d6db80-9d1f-4e38-a564-f587474f6294 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 554.813503] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 5ef55aca-0714-4b34-85f2-b6d53f97c2d0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 554.813627] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 554.813744] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance cec6ec60-5e8a-4c31-ba75-001f3c1980f0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 554.813856] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 64c4c933-2b89-409a-9b4c-eccc7f481b67 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 554.814187] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance e525b0c2-55f9-43f2-9d4f-faf46c0cd559 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 554.814320] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance d467124f-0b2b-4108-90d1-40f149e55ff0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 554.814441] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 175ede99-48e4-43dc-b563-140f42244c97 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 554.814551] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 554.843172] env[63345]: DEBUG oslo_vmware.api [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] Task: {'id': task-1016652, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.119171} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 554.843172] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 554.843172] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bf21dfc-197c-43b5-9720-5179282a6538 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.872505] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Reconfiguring VM instance instance-00000007 to attach disk [datastore2] 5ef55aca-0714-4b34-85f2-b6d53f97c2d0/5ef55aca-0714-4b34-85f2-b6d53f97c2d0.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 554.873578] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-63899019-68aa-4d5e-965a-8c23cd3c8940 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.902165] env[63345]: DEBUG oslo_vmware.api [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] Waiting for the task: (returnval){ [ 554.902165] env[63345]: value = "task-1016653" [ 554.902165] env[63345]: _type = "Task" [ 554.902165] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 554.914789] env[63345]: DEBUG oslo_vmware.api [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] Task: {'id': task-1016653, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 554.946718] env[63345]: DEBUG nova.network.neutron [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] Successfully created port: e9f0dd1d-5e7c-462c-94ba-afe000098ad6 {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 554.999081] env[63345]: DEBUG nova.compute.manager [req-ca475674-45b2-42c0-ab78-f7ffe0e8ab1e req-9dab44af-27b7-4d38-9e2b-5717b61c27ab service nova] [instance: 175ede99-48e4-43dc-b563-140f42244c97] Received event network-changed-6afb7501-c3fd-4ca2-b6a8-f228b9a81260 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 554.999081] env[63345]: DEBUG nova.compute.manager [req-ca475674-45b2-42c0-ab78-f7ffe0e8ab1e req-9dab44af-27b7-4d38-9e2b-5717b61c27ab service nova] [instance: 175ede99-48e4-43dc-b563-140f42244c97] Refreshing instance network info cache due to event network-changed-6afb7501-c3fd-4ca2-b6a8-f228b9a81260. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 554.999081] env[63345]: DEBUG oslo_concurrency.lockutils [req-ca475674-45b2-42c0-ab78-f7ffe0e8ab1e req-9dab44af-27b7-4d38-9e2b-5717b61c27ab service nova] Acquiring lock "refresh_cache-175ede99-48e4-43dc-b563-140f42244c97" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 555.016611] env[63345]: DEBUG oslo_concurrency.lockutils [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Acquiring lock "bc9d2e6a-f77a-4a21-90bc-81949cbfce91" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 555.016611] env[63345]: DEBUG oslo_concurrency.lockutils [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Lock "bc9d2e6a-f77a-4a21-90bc-81949cbfce91" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 555.143635] env[63345]: DEBUG nova.network.neutron [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 175ede99-48e4-43dc-b563-140f42244c97] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 555.184910] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d45e143f-5f98-4d14-bf6e-59c328ff4693 tempest-ServersWithSpecificFlavorTestJSON-889674138 tempest-ServersWithSpecificFlavorTestJSON-889674138-project-member] Releasing lock "refresh_cache-d467124f-0b2b-4108-90d1-40f149e55ff0" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 555.185360] env[63345]: DEBUG nova.compute.manager [None req-d45e143f-5f98-4d14-bf6e-59c328ff4693 tempest-ServersWithSpecificFlavorTestJSON-889674138 tempest-ServersWithSpecificFlavorTestJSON-889674138-project-member] [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] Start destroying the instance on the hypervisor. {{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 555.185560] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-d45e143f-5f98-4d14-bf6e-59c328ff4693 tempest-ServersWithSpecificFlavorTestJSON-889674138 tempest-ServersWithSpecificFlavorTestJSON-889674138-project-member] [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 555.185925] env[63345]: DEBUG oslo_concurrency.lockutils [req-1b418d6c-e8c4-4a12-9961-19a14231063f req-a72d9a3c-4023-4c92-828a-4066fc59d717 service nova] Acquired lock "refresh_cache-d467124f-0b2b-4108-90d1-40f149e55ff0" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 555.186205] env[63345]: DEBUG nova.network.neutron [req-1b418d6c-e8c4-4a12-9961-19a14231063f req-a72d9a3c-4023-4c92-828a-4066fc59d717 service nova] [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] Refreshing network info cache for port 29fbfacc-22e8-4e75-8ec6-1ffc12ff0bcc {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 555.188009] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-92252ec9-6328-4da5-9a2b-5c020af37dbb {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.197937] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbc4deb5-902c-47ca-b931-8fa7e04beb72 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.225223] env[63345]: WARNING nova.virt.vmwareapi.vmops [None req-d45e143f-5f98-4d14-bf6e-59c328ff4693 tempest-ServersWithSpecificFlavorTestJSON-889674138 tempest-ServersWithSpecificFlavorTestJSON-889674138-project-member] [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d467124f-0b2b-4108-90d1-40f149e55ff0 could not be found. 
[ 555.225619] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-d45e143f-5f98-4d14-bf6e-59c328ff4693 tempest-ServersWithSpecificFlavorTestJSON-889674138 tempest-ServersWithSpecificFlavorTestJSON-889674138-project-member] [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 555.225927] env[63345]: INFO nova.compute.manager [None req-d45e143f-5f98-4d14-bf6e-59c328ff4693 tempest-ServersWithSpecificFlavorTestJSON-889674138 tempest-ServersWithSpecificFlavorTestJSON-889674138-project-member] [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] Took 0.04 seconds to destroy the instance on the hypervisor. [ 555.226338] env[63345]: DEBUG oslo.service.loopingcall [None req-d45e143f-5f98-4d14-bf6e-59c328ff4693 tempest-ServersWithSpecificFlavorTestJSON-889674138 tempest-ServersWithSpecificFlavorTestJSON-889674138-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 555.228802] env[63345]: DEBUG nova.compute.manager [-] [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 555.229368] env[63345]: DEBUG nova.network.neutron [-] [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 555.254994] env[63345]: DEBUG nova.network.neutron [-] [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 555.318165] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 6e8ef6b9-4684-4685-949a-2e2868aa3fb7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 555.325378] env[63345]: DEBUG nova.network.neutron [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 175ede99-48e4-43dc-b563-140f42244c97] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 555.414765] env[63345]: DEBUG oslo_vmware.api [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] Task: {'id': task-1016653, 'name': ReconfigVM_Task, 'duration_secs': 0.284767} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 555.415247] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Reconfigured VM instance instance-00000007 to attach disk [datastore2] 5ef55aca-0714-4b34-85f2-b6d53f97c2d0/5ef55aca-0714-4b34-85f2-b6d53f97c2d0.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 555.415892] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-547d0176-5754-4d47-a0b4-838070a1c393 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.423453] env[63345]: DEBUG oslo_vmware.api [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] Waiting for the task: (returnval){ [ 555.423453] env[63345]: value = "task-1016654" [ 555.423453] env[63345]: _type = "Task" [ 555.423453] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 555.433702] env[63345]: DEBUG oslo_vmware.api [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] Task: {'id': task-1016654, 'name': Rename_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 555.717964] env[63345]: DEBUG nova.network.neutron [req-1b418d6c-e8c4-4a12-9961-19a14231063f req-a72d9a3c-4023-4c92-828a-4066fc59d717 service nova] [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 555.757418] env[63345]: DEBUG nova.network.neutron [-] [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 555.791611] env[63345]: DEBUG nova.compute.manager [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] Start spawning the instance on the hypervisor. {{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 555.825341] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 1f595aef-799f-4ca4-be91-e95ef056926c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 555.830447] env[63345]: DEBUG nova.virt.hardware [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 555.830733] env[63345]: DEBUG nova.virt.hardware [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 555.831178] env[63345]: DEBUG nova.virt.hardware [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 555.831397] env[63345]: DEBUG nova.virt.hardware [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 555.831540] env[63345]: DEBUG nova.virt.hardware [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 555.831702] env[63345]: DEBUG nova.virt.hardware [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 555.831916] env[63345]: DEBUG nova.virt.hardware [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 555.832094] env[63345]: DEBUG nova.virt.hardware [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 555.832265] env[63345]: DEBUG 
nova.virt.hardware [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 555.832433] env[63345]: DEBUG nova.virt.hardware [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 555.832593] env[63345]: DEBUG nova.virt.hardware [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 555.834568] env[63345]: DEBUG oslo_concurrency.lockutils [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Releasing lock "refresh_cache-175ede99-48e4-43dc-b563-140f42244c97" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 555.835680] env[63345]: DEBUG nova.compute.manager [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 175ede99-48e4-43dc-b563-140f42244c97] Start destroying the instance on the hypervisor. {{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 555.835848] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 175ede99-48e4-43dc-b563-140f42244c97] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 555.836665] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a07b8a40-08f4-4b11-98d4-6723fb3e4d0c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.843018] env[63345]: DEBUG oslo_concurrency.lockutils [req-ca475674-45b2-42c0-ab78-f7ffe0e8ab1e req-9dab44af-27b7-4d38-9e2b-5717b61c27ab service nova] Acquired lock "refresh_cache-175ede99-48e4-43dc-b563-140f42244c97" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 555.843018] env[63345]: DEBUG nova.network.neutron [req-ca475674-45b2-42c0-ab78-f7ffe0e8ab1e req-9dab44af-27b7-4d38-9e2b-5717b61c27ab service nova] [instance: 175ede99-48e4-43dc-b563-140f42244c97] Refreshing network info cache for port 6afb7501-c3fd-4ca2-b6a8-f228b9a81260 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 555.843639] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-15890092-928f-4bd3-8b61-619a07814de8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.857277] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae8ba7d7-e5fd-4268-9db2-159b2d35c293 {{(pid=63345) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.865236] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f889ee0-da8d-468e-b781-815c8f5d0f12 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.889336] env[63345]: DEBUG nova.network.neutron [req-1b418d6c-e8c4-4a12-9961-19a14231063f req-a72d9a3c-4023-4c92-828a-4066fc59d717 service nova] [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 555.896309] env[63345]: WARNING nova.virt.vmwareapi.vmops [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 175ede99-48e4-43dc-b563-140f42244c97] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 175ede99-48e4-43dc-b563-140f42244c97 could not be found. [ 555.896558] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 175ede99-48e4-43dc-b563-140f42244c97] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 555.896761] env[63345]: INFO nova.compute.manager [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 175ede99-48e4-43dc-b563-140f42244c97] Took 0.06 seconds to destroy the instance on the hypervisor. [ 555.897812] env[63345]: DEBUG oslo.service.loopingcall [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 555.897911] env[63345]: DEBUG nova.compute.manager [-] [instance: 175ede99-48e4-43dc-b563-140f42244c97] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 555.897998] env[63345]: DEBUG nova.network.neutron [-] [instance: 175ede99-48e4-43dc-b563-140f42244c97] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 555.925700] env[63345]: DEBUG nova.network.neutron [-] [instance: 175ede99-48e4-43dc-b563-140f42244c97] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 555.941374] env[63345]: DEBUG oslo_vmware.api [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] Task: {'id': task-1016654, 'name': Rename_Task, 'duration_secs': 0.155017} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 555.941374] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 555.941374] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6e41e133-5ea4-450f-a8a5-3e542a16f043 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.952054] env[63345]: DEBUG oslo_vmware.api [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] Waiting for the task: (returnval){ [ 555.952054] env[63345]: value = "task-1016655" [ 555.952054] env[63345]: _type = "Task" [ 555.952054] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 555.965349] env[63345]: DEBUG oslo_vmware.api [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] Task: {'id': task-1016655, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 556.166479] env[63345]: ERROR nova.compute.manager [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port e9f0dd1d-5e7c-462c-94ba-afe000098ad6, please check neutron logs for more information. 
[ 556.166479] env[63345]: ERROR nova.compute.manager Traceback (most recent call last): [ 556.166479] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 556.166479] env[63345]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 556.166479] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 556.166479] env[63345]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 556.166479] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 556.166479] env[63345]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 556.166479] env[63345]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 556.166479] env[63345]: ERROR nova.compute.manager self.force_reraise() [ 556.166479] env[63345]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 556.166479] env[63345]: ERROR nova.compute.manager raise self.value [ 556.166479] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 556.166479] env[63345]: ERROR nova.compute.manager updated_port = self._update_port( [ 556.166479] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 556.166479] env[63345]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 556.167078] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 556.167078] env[63345]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 556.167078] env[63345]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port e9f0dd1d-5e7c-462c-94ba-afe000098ad6, please check neutron logs for more information. 
[ 556.167078] env[63345]: ERROR nova.compute.manager [ 556.167078] env[63345]: Traceback (most recent call last): [ 556.167078] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 556.167078] env[63345]: listener.cb(fileno) [ 556.167078] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 556.167078] env[63345]: result = function(*args, **kwargs) [ 556.167078] env[63345]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 556.167078] env[63345]: return func(*args, **kwargs) [ 556.167078] env[63345]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 556.167078] env[63345]: raise e [ 556.167078] env[63345]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 556.167078] env[63345]: nwinfo = self.network_api.allocate_for_instance( [ 556.167078] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 556.167078] env[63345]: created_port_ids = self._update_ports_for_instance( [ 556.167078] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 556.167078] env[63345]: with excutils.save_and_reraise_exception(): [ 556.167078] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 556.167078] env[63345]: self.force_reraise() [ 556.167078] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 556.167078] env[63345]: raise self.value [ 556.167078] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 556.167078] env[63345]: updated_port = self._update_port( [ 556.167078] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 556.167078] env[63345]: _ensure_no_port_binding_failure(port) [ 556.167078] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 556.167078] env[63345]: raise exception.PortBindingFailed(port_id=port['id']) [ 556.167968] env[63345]: nova.exception.PortBindingFailed: Binding failed for port e9f0dd1d-5e7c-462c-94ba-afe000098ad6, please check neutron logs for more information. [ 556.167968] env[63345]: Removing descriptor: 16 [ 556.167968] env[63345]: ERROR nova.compute.manager [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port e9f0dd1d-5e7c-462c-94ba-afe000098ad6, please check neutron logs for more information. 
[ 556.167968] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] Traceback (most recent call last): [ 556.167968] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 556.167968] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] yield resources [ 556.167968] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 556.167968] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] self.driver.spawn(context, instance, image_meta, [ 556.167968] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 556.167968] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] self._vmops.spawn(context, instance, image_meta, injected_files, [ 556.167968] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 556.167968] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] vm_ref = self.build_virtual_machine(instance, [ 556.168358] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 556.168358] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] vif_infos = vmwarevif.get_vif_info(self._session, [ 556.168358] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 556.168358] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] for vif in network_info: [ 556.168358] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 556.168358] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] return self._sync_wrapper(fn, *args, **kwargs) [ 556.168358] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 556.168358] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] self.wait() [ 556.168358] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 556.168358] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] self[:] = self._gt.wait() [ 556.168358] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 556.168358] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] return self._exit_event.wait() [ 556.168358] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 556.168780] env[63345]: ERROR 
nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] result = hub.switch() [ 556.168780] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 556.168780] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] return self.greenlet.switch() [ 556.168780] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 556.168780] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] result = function(*args, **kwargs) [ 556.168780] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 556.168780] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] return func(*args, **kwargs) [ 556.168780] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 556.168780] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] raise e [ 556.168780] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 556.168780] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] nwinfo = self.network_api.allocate_for_instance( [ 556.168780] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 556.168780] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] created_port_ids = self._update_ports_for_instance( [ 556.169208] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 556.169208] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] with excutils.save_and_reraise_exception(): [ 556.169208] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 556.169208] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] self.force_reraise() [ 556.169208] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 556.169208] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] raise self.value [ 556.169208] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 556.169208] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] updated_port = self._update_port( [ 556.169208] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 556.169208] 
env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] _ensure_no_port_binding_failure(port) [ 556.169208] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 556.169208] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] raise exception.PortBindingFailed(port_id=port['id']) [ 556.169561] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] nova.exception.PortBindingFailed: Binding failed for port e9f0dd1d-5e7c-462c-94ba-afe000098ad6, please check neutron logs for more information. [ 556.169561] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] [ 556.169561] env[63345]: INFO nova.compute.manager [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] Terminating instance [ 556.261785] env[63345]: INFO nova.compute.manager [-] [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] Took 1.03 seconds to deallocate network for instance. [ 556.263251] env[63345]: DEBUG nova.compute.claims [None req-d45e143f-5f98-4d14-bf6e-59c328ff4693 tempest-ServersWithSpecificFlavorTestJSON-889674138 tempest-ServersWithSpecificFlavorTestJSON-889674138-project-member] [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] Aborting claim: {{(pid=63345) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 556.264590] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d45e143f-5f98-4d14-bf6e-59c328ff4693 tempest-ServersWithSpecificFlavorTestJSON-889674138 tempest-ServersWithSpecificFlavorTestJSON-889674138-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 556.271440] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Acquiring lock "04fd7aaa-658d-480d-8465-825f120477bc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 556.271977] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Lock "04fd7aaa-658d-480d-8465-825f120477bc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 556.343360] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 556.377663] env[63345]: DEBUG nova.network.neutron [req-ca475674-45b2-42c0-ab78-f7ffe0e8ab1e req-9dab44af-27b7-4d38-9e2b-5717b61c27ab service nova] [instance: 175ede99-48e4-43dc-b563-140f42244c97] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 556.398535] env[63345]: DEBUG oslo_concurrency.lockutils [req-1b418d6c-e8c4-4a12-9961-19a14231063f req-a72d9a3c-4023-4c92-828a-4066fc59d717 service nova] Releasing lock "refresh_cache-d467124f-0b2b-4108-90d1-40f149e55ff0" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 556.433689] env[63345]: DEBUG nova.network.neutron [-] [instance: 175ede99-48e4-43dc-b563-140f42244c97] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 556.467426] env[63345]: DEBUG oslo_vmware.api [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] Task: {'id': task-1016655, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 556.635606] env[63345]: DEBUG nova.network.neutron [req-ca475674-45b2-42c0-ab78-f7ffe0e8ab1e req-9dab44af-27b7-4d38-9e2b-5717b61c27ab service nova] [instance: 175ede99-48e4-43dc-b563-140f42244c97] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 556.677125] env[63345]: DEBUG oslo_concurrency.lockutils [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Acquiring lock "refresh_cache-d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 556.677125] env[63345]: DEBUG oslo_concurrency.lockutils [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Acquired lock "refresh_cache-d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 556.677125] env[63345]: DEBUG nova.network.neutron [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 556.848338] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 1a54db9b-0482-4038-a505-46447f0c33ef has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 556.939501] env[63345]: INFO nova.compute.manager [-] [instance: 175ede99-48e4-43dc-b563-140f42244c97] Took 1.04 seconds to deallocate network for instance. 
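The PortBindingFailed traceback recorded above ends in nova/network/neutron.py, where _ensure_no_port_binding_failure(port) raises exception.PortBindingFailed(port_id=port['id']); Nova then terminates the half-built instance, deallocates its networking, and aborts its resource claim, which is what the surrounding "Terminating instance", "Deallocating network for instance" and "Aborting claim" entries show. The snippet below is only a minimal, self-contained sketch of that guard, not Nova's actual code: the 'binding:vif_type' field and the 'binding_failed' sentinel are assumptions about how a failed Neutron binding is reported, and the stand-in exception class merely mirrors the message text seen in this log.

# Minimal sketch (Python) of the guard implied by the traceback above.
# Assumption: a failed binding shows up as binding:vif_type == 'binding_failed'
# on the port dict returned by Neutron; this log does not print the port body.

VIF_TYPE_BINDING_FAILED = 'binding_failed'  # assumed sentinel value


class PortBindingFailed(Exception):
    """Stand-in for nova.exception.PortBindingFailed."""

    def __init__(self, port_id):
        super().__init__(
            "Binding failed for port %s, please check neutron logs for "
            "more information." % port_id)
        self.port_id = port_id


def _ensure_no_port_binding_failure(port):
    # Raise as soon as the port reports a failed binding so the caller
    # (allocate_for_instance in the traceback) can abort the build.
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])


if __name__ == '__main__':
    # Port id taken from the log entries above; the rest is illustrative.
    failed_port = {'id': 'e9f0dd1d-5e7c-462c-94ba-afe000098ad6',
                   'binding:vif_type': VIF_TYPE_BINDING_FAILED}
    try:
        _ensure_no_port_binding_failure(failed_port)
    except PortBindingFailed as exc:
        print(exc)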
[ 556.940679] env[63345]: DEBUG nova.compute.claims [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 175ede99-48e4-43dc-b563-140f42244c97] Aborting claim: {{(pid=63345) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 556.941104] env[63345]: DEBUG oslo_concurrency.lockutils [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 556.967125] env[63345]: DEBUG oslo_vmware.api [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] Task: {'id': task-1016655, 'name': PowerOnVM_Task, 'duration_secs': 0.533866} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 556.967735] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 556.968119] env[63345]: DEBUG nova.compute.manager [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 556.971424] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32551c7f-6c58-45f9-b2f7-59b9f5177f57 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.144949] env[63345]: DEBUG oslo_concurrency.lockutils [req-ca475674-45b2-42c0-ab78-f7ffe0e8ab1e req-9dab44af-27b7-4d38-9e2b-5717b61c27ab service nova] Releasing lock "refresh_cache-175ede99-48e4-43dc-b563-140f42244c97" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 557.206197] env[63345]: DEBUG nova.network.neutron [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 557.334344] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Acquiring lock "46d3332a-bfb9-4812-8201-a87467ce5151" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 557.336159] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Lock "46d3332a-bfb9-4812-8201-a87467ce5151" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 557.348353] env[63345]: DEBUG nova.network.neutron [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 557.354788] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 557.491894] env[63345]: DEBUG oslo_concurrency.lockutils [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 557.854323] env[63345]: DEBUG oslo_concurrency.lockutils [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Releasing lock "refresh_cache-d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 557.854782] env[63345]: DEBUG nova.compute.manager [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] Start destroying the instance on the hypervisor. 
{{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 557.854992] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 557.855717] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 079cd9f1-4753-4298-9b06-c3b9925d2982 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 557.856966] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-028a48c1-d8e1-4857-bff8-54c09e1b3426 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.870466] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-903940fa-7c25-4d19-9e47-c7a898e202c1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.899573] env[63345]: WARNING nova.virt.vmwareapi.vmops [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc could not be found. [ 557.899831] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 557.900029] env[63345]: INFO nova.compute.manager [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] Took 0.05 seconds to destroy the instance on the hypervisor. [ 557.900309] env[63345]: DEBUG oslo.service.loopingcall [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 557.900776] env[63345]: DEBUG nova.compute.manager [-] [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 557.900874] env[63345]: DEBUG nova.network.neutron [-] [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 557.907494] env[63345]: DEBUG nova.compute.manager [req-1fe07bef-8b51-4622-a17f-657a02e6769f req-71a87fe8-63a4-4f53-8993-6d222ed6b5d4 service nova] [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] Received event network-vif-deleted-29fbfacc-22e8-4e75-8ec6-1ffc12ff0bcc {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 557.920303] env[63345]: DEBUG nova.network.neutron [-] [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 558.032219] env[63345]: DEBUG oslo_concurrency.lockutils [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Acquiring lock "27c6dc17-4ded-4fe7-8fba-265eae64fc32" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 558.032453] env[63345]: DEBUG oslo_concurrency.lockutils [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Lock "27c6dc17-4ded-4fe7-8fba-265eae64fc32" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 558.173422] env[63345]: DEBUG nova.compute.manager [req-e244fb40-80ea-4921-80a6-1bdea5f64503 req-27b76946-c800-466a-9325-ac62bff1b67f service nova] [instance: 175ede99-48e4-43dc-b563-140f42244c97] Received event network-vif-deleted-6afb7501-c3fd-4ca2-b6a8-f228b9a81260 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 558.173422] env[63345]: DEBUG nova.compute.manager [req-e244fb40-80ea-4921-80a6-1bdea5f64503 req-27b76946-c800-466a-9325-ac62bff1b67f service nova] [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] Received event network-changed-e9f0dd1d-5e7c-462c-94ba-afe000098ad6 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 558.174409] env[63345]: DEBUG nova.compute.manager [req-e244fb40-80ea-4921-80a6-1bdea5f64503 req-27b76946-c800-466a-9325-ac62bff1b67f service nova] [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] Refreshing instance network info cache due to event network-changed-e9f0dd1d-5e7c-462c-94ba-afe000098ad6. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 558.174694] env[63345]: DEBUG oslo_concurrency.lockutils [req-e244fb40-80ea-4921-80a6-1bdea5f64503 req-27b76946-c800-466a-9325-ac62bff1b67f service nova] Acquiring lock "refresh_cache-d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 558.174822] env[63345]: DEBUG oslo_concurrency.lockutils [req-e244fb40-80ea-4921-80a6-1bdea5f64503 req-27b76946-c800-466a-9325-ac62bff1b67f service nova] Acquired lock "refresh_cache-d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 558.175083] env[63345]: DEBUG nova.network.neutron [req-e244fb40-80ea-4921-80a6-1bdea5f64503 req-27b76946-c800-466a-9325-ac62bff1b67f service nova] [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] Refreshing network info cache for port e9f0dd1d-5e7c-462c-94ba-afe000098ad6 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 558.366570] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 2889c4d1-ac1b-404d-a4f7-2b908557348d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 558.422522] env[63345]: DEBUG nova.network.neutron [-] [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 558.742403] env[63345]: DEBUG nova.network.neutron [req-e244fb40-80ea-4921-80a6-1bdea5f64503 req-27b76946-c800-466a-9325-ac62bff1b67f service nova] [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 558.867017] env[63345]: DEBUG oslo_concurrency.lockutils [None req-55dacc43-d451-450e-98a3-0625a2b4f048 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Acquiring lock "5ef55aca-0714-4b34-85f2-b6d53f97c2d0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 558.867595] env[63345]: DEBUG oslo_concurrency.lockutils [None req-55dacc43-d451-450e-98a3-0625a2b4f048 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Lock "5ef55aca-0714-4b34-85f2-b6d53f97c2d0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 558.867877] env[63345]: DEBUG oslo_concurrency.lockutils [None req-55dacc43-d451-450e-98a3-0625a2b4f048 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Acquiring lock "5ef55aca-0714-4b34-85f2-b6d53f97c2d0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 558.868119] env[63345]: DEBUG oslo_concurrency.lockutils [None req-55dacc43-d451-450e-98a3-0625a2b4f048 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Lock "5ef55aca-0714-4b34-85f2-b6d53f97c2d0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 558.868529] env[63345]: DEBUG oslo_concurrency.lockutils [None req-55dacc43-d451-450e-98a3-0625a2b4f048 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Lock "5ef55aca-0714-4b34-85f2-b6d53f97c2d0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 558.871502] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 35a5bd72-403b-467b-ad52-1a1bf4958dbb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 558.877146] env[63345]: INFO nova.compute.manager [None req-55dacc43-d451-450e-98a3-0625a2b4f048 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Terminating instance [ 558.917119] env[63345]: DEBUG nova.network.neutron [req-e244fb40-80ea-4921-80a6-1bdea5f64503 req-27b76946-c800-466a-9325-ac62bff1b67f service nova] [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 558.926912] env[63345]: INFO nova.compute.manager [-] [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] Took 1.03 seconds to deallocate network for instance. [ 558.931145] env[63345]: DEBUG nova.compute.claims [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] Aborting claim: {{(pid=63345) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 558.931145] env[63345]: DEBUG oslo_concurrency.lockutils [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 559.378861] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance fb2cdca8-441c-4edb-be11-6b89c19b3cad has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 559.382022] env[63345]: DEBUG oslo_concurrency.lockutils [None req-55dacc43-d451-450e-98a3-0625a2b4f048 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Acquiring lock "refresh_cache-5ef55aca-0714-4b34-85f2-b6d53f97c2d0" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 559.382022] env[63345]: DEBUG oslo_concurrency.lockutils [None req-55dacc43-d451-450e-98a3-0625a2b4f048 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Acquired lock "refresh_cache-5ef55aca-0714-4b34-85f2-b6d53f97c2d0" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 559.382022] env[63345]: DEBUG nova.network.neutron [None req-55dacc43-d451-450e-98a3-0625a2b4f048 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 559.423317] env[63345]: DEBUG oslo_concurrency.lockutils [req-e244fb40-80ea-4921-80a6-1bdea5f64503 req-27b76946-c800-466a-9325-ac62bff1b67f service nova] Releasing lock "refresh_cache-d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 559.423317] env[63345]: DEBUG nova.compute.manager [req-e244fb40-80ea-4921-80a6-1bdea5f64503 req-27b76946-c800-466a-9325-ac62bff1b67f service nova] [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] Received event network-vif-deleted-e9f0dd1d-5e7c-462c-94ba-afe000098ad6 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 559.884545] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 559.919369] env[63345]: DEBUG nova.network.neutron [None req-55dacc43-d451-450e-98a3-0625a2b4f048 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 560.021929] env[63345]: DEBUG nova.network.neutron [None req-55dacc43-d451-450e-98a3-0625a2b4f048 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 560.390448] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance ee31689b-bf0b-4737-86c7-5451c763e603 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 560.528315] env[63345]: DEBUG oslo_concurrency.lockutils [None req-55dacc43-d451-450e-98a3-0625a2b4f048 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Releasing lock "refresh_cache-5ef55aca-0714-4b34-85f2-b6d53f97c2d0" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 560.528315] env[63345]: DEBUG nova.compute.manager [None req-55dacc43-d451-450e-98a3-0625a2b4f048 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Start destroying the instance on the hypervisor. {{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 560.528315] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-55dacc43-d451-450e-98a3-0625a2b4f048 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 560.528315] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a6375e4-d9f0-4685-b70d-6e26fcfdd439 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.543525] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-55dacc43-d451-450e-98a3-0625a2b4f048 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 560.544011] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4ae26c7a-31ed-4eec-bee9-eb32f2b90bd2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.552824] env[63345]: DEBUG oslo_vmware.api [None req-55dacc43-d451-450e-98a3-0625a2b4f048 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Waiting for the task: (returnval){ [ 560.552824] env[63345]: value = "task-1016658" [ 560.552824] env[63345]: _type = "Task" [ 560.552824] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 560.565478] env[63345]: DEBUG oslo_vmware.api [None req-55dacc43-d451-450e-98a3-0625a2b4f048 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Task: {'id': task-1016658, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 560.893880] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 28caa5f5-141a-4ef9-abb3-33a1973d99cf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 561.063983] env[63345]: DEBUG oslo_vmware.api [None req-55dacc43-d451-450e-98a3-0625a2b4f048 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Task: {'id': task-1016658, 'name': PowerOffVM_Task, 'duration_secs': 0.129905} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 561.064263] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-55dacc43-d451-450e-98a3-0625a2b4f048 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 561.064444] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-55dacc43-d451-450e-98a3-0625a2b4f048 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 561.064723] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d3bf7448-3941-4edb-b5cb-81656b520b69 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.105022] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-55dacc43-d451-450e-98a3-0625a2b4f048 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 561.105022] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-55dacc43-d451-450e-98a3-0625a2b4f048 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 561.105022] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-55dacc43-d451-450e-98a3-0625a2b4f048 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Deleting the datastore file [datastore2] 5ef55aca-0714-4b34-85f2-b6d53f97c2d0 {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 561.105022] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9b6f9551-55a3-4355-90e8-61bdb9a149eb {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.112571] env[63345]: DEBUG oslo_vmware.api [None req-55dacc43-d451-450e-98a3-0625a2b4f048 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Waiting for the task: (returnval){ [ 561.112571] env[63345]: value = "task-1016661" [ 561.112571] env[63345]: _type = "Task" [ 561.112571] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 561.125740] env[63345]: DEBUG oslo_vmware.api [None req-55dacc43-d451-450e-98a3-0625a2b4f048 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Task: {'id': task-1016661, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 561.397642] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance b4a7d6dd-98dc-49d8-b344-1878cd5a3f51 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 561.624516] env[63345]: DEBUG oslo_vmware.api [None req-55dacc43-d451-450e-98a3-0625a2b4f048 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Task: {'id': task-1016661, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.28384} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 561.624892] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-55dacc43-d451-450e-98a3-0625a2b4f048 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 561.625672] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-55dacc43-d451-450e-98a3-0625a2b4f048 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 561.625672] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-55dacc43-d451-450e-98a3-0625a2b4f048 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 561.625827] env[63345]: INFO nova.compute.manager [None req-55dacc43-d451-450e-98a3-0625a2b4f048 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Took 1.10 seconds to destroy the instance on the hypervisor. [ 561.626076] env[63345]: DEBUG oslo.service.loopingcall [None req-55dacc43-d451-450e-98a3-0625a2b4f048 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 561.626497] env[63345]: DEBUG nova.compute.manager [-] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 561.627763] env[63345]: DEBUG nova.network.neutron [-] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 561.649971] env[63345]: DEBUG nova.network.neutron [-] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 561.870801] env[63345]: DEBUG oslo_concurrency.lockutils [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Acquiring lock "070a834d-6478-4705-8df0-2a27c8780507" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 561.873158] env[63345]: DEBUG oslo_concurrency.lockutils [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Lock "070a834d-6478-4705-8df0-2a27c8780507" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 561.904912] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance bc9d2e6a-f77a-4a21-90bc-81949cbfce91 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 561.905196] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Total usable vcpus: 48, total allocated vcpus: 8 {{(pid=63345) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 561.905405] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2048MB phys_disk=200GB used_disk=8GB total_vcpus=48 used_vcpus=8 pci_stats=[] {{(pid=63345) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 562.152801] env[63345]: DEBUG nova.network.neutron [-] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 562.360138] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-980b2f14-e332-41be-bb8b-31f7a474900f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.371898] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a879bb4e-2909-4461-8a4b-85108c81f27e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.407746] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c066a037-2a8f-441e-bc51-39f06eeaad66 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.421025] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d675a40-a226-4263-824e-39dcc971e555 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.439040] env[63345]: DEBUG nova.compute.provider_tree [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 562.653886] env[63345]: INFO nova.compute.manager [-] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Took 1.03 seconds to deallocate network for instance. 
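The entries above show the vmwareapi driver tearing down instance 5ef55aca-0714-4b34-85f2-b6d53f97c2d0: each vCenter operation (PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task) returns a task reference such as task-1016661, which Nova polls until it reports success before moving on to network deallocation. As a rough, illustrative sketch of that poll loop only (not the oslo.vmware implementation; get_task_state, the interval and the timeout are hypothetical stand-ins), the control flow is roughly:

import time

# Hypothetical helper: the real driver reads the task's info.state from
# vCenter via oslo.vmware; this is only a stand-in for the sketch.
def get_task_state(task_ref):
    raise NotImplementedError("replace with a real vSphere API call")

def wait_for_task(task_ref, interval=0.5, timeout=300):
    """Poll a vCenter task reference until it succeeds or errors."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        # Typical task states: 'queued', 'running', 'success', 'error'.
        state = get_task_state(task_ref)
        if state == 'success':
            return
        if state == 'error':
            raise RuntimeError("task %s failed" % task_ref)
        time.sleep(interval)
    raise TimeoutError("task %s did not finish in %ss" % (task_ref, timeout))

In the log this loop surfaces as the repeated "Task: {...} progress is 0%" polls followed by a "completed successfully" entry with the measured duration_secs.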
[ 562.942283] env[63345]: DEBUG nova.scheduler.client.report [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 563.165370] env[63345]: DEBUG oslo_concurrency.lockutils [None req-55dacc43-d451-450e-98a3-0625a2b4f048 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 563.452015] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63345) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 563.452413] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 9.692s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 563.452717] env[63345]: DEBUG oslo_concurrency.lockutils [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 19.656s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 563.452924] env[63345]: DEBUG nova.objects.instance [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63345) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 564.464119] env[63345]: DEBUG oslo_concurrency.lockutils [None req-559916e3-7dbd-4732-bb2a-51d0cd026667 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.010s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 564.464357] env[63345]: DEBUG oslo_concurrency.lockutils [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 20.451s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 564.576229] env[63345]: DEBUG oslo_concurrency.lockutils [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b 
tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Acquiring lock "4d41f4a7-4fde-4d34-be7c-533c00fe5ae6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 564.577261] env[63345]: DEBUG oslo_concurrency.lockutils [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Lock "4d41f4a7-4fde-4d34-be7c-533c00fe5ae6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 565.448147] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26f3e6b5-8e7a-4819-adc8-f8680f970e8e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.458597] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2cfb3c7-87db-4b10-ba1e-aca43c3b0b0d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.494060] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86b3a4ae-d70b-4605-bff3-c217edc7c6de {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.502537] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48ea0e0b-08a3-47eb-ac11-a2942758c8fe {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.518301] env[63345]: DEBUG nova.compute.provider_tree [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 188, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 566.049600] env[63345]: ERROR nova.scheduler.client.report [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] [req-94deda2c-e2c9-4229-aba3-a3cf6e75a496] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 188, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID fc35ddde-c15e-4ab8-bf77-a06ae0805b57. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-94deda2c-e2c9-4229-aba3-a3cf6e75a496"}]}: nova.exception.PortBindingFailed: Binding failed for port fc3acfbf-5f06-437b-a030-e8eec8a877b8, please check neutron logs for more information. [ 566.069565] env[63345]: DEBUG nova.scheduler.client.report [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] Refreshing inventories for resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 566.089307] env[63345]: DEBUG nova.scheduler.client.report [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] Updating ProviderTree inventory for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 566.089539] env[63345]: DEBUG nova.compute.provider_tree [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 566.104273] env[63345]: DEBUG nova.scheduler.client.report [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] Refreshing aggregate associations for resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57, aggregates: None {{(pid=63345) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 566.134101] env[63345]: DEBUG nova.scheduler.client.report [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] Refreshing trait associations for resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=63345) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 566.616526] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb64afd6-d0cf-48c5-a009-7df8a5f1e8ad {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.626778] env[63345]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e53a8745-67d0-4a31-a28a-636736933d5d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.660661] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4c8e2e1-1774-483a-93b4-f1de25492e05 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.673392] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44fd21ab-1cba-48fe-bdc7-d47c7030523b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.693503] env[63345]: DEBUG nova.compute.provider_tree [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 188, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 567.224192] env[63345]: ERROR nova.scheduler.client.report [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] [req-905d1b30-31e0-4dcd-aaa4-d73bf0a1c40f] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 188, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID fc35ddde-c15e-4ab8-bf77-a06ae0805b57. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-905d1b30-31e0-4dcd-aaa4-d73bf0a1c40f"}]}: nova.exception.PortBindingFailed: Binding failed for port fc3acfbf-5f06-437b-a030-e8eec8a877b8, please check neutron logs for more information. 
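The two 409 "placement.concurrent_update" errors above mean the resource provider's generation changed between the report client's read and its inventory PUT (another claim or abort bumped it first). The client reacts exactly as the subsequent DEBUG lines show: it refreshes inventories, aggregates and traits for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 and retries with the new generation. A minimal sketch of that optimistic-concurrency retry loop, with get_inventory and put_inventory as hypothetical stand-ins for the Placement REST calls (this is not the report client's code):

class GenerationConflict(Exception):
    """Stand-in for Placement's HTTP 409 placement.concurrent_update."""

# Hypothetical stand-ins for reading/writing a provider's inventories.
def get_inventory(rp_uuid):
    raise NotImplementedError  # returns (generation, inventory_dict)

def put_inventory(rp_uuid, generation, inventory):
    raise NotImplementedError  # rejects the write if generation is stale

def set_inventory_with_retries(rp_uuid, inventory, max_attempts=4):
    """Optimistic concurrency: re-read the provider generation and retry
    whenever Placement reports a concurrent update (HTTP 409)."""
    for _attempt in range(max_attempts):
        generation, _current = get_inventory(rp_uuid)
        try:
            put_inventory(rp_uuid, generation, inventory)
            return
        except GenerationConflict:
            # Another writer bumped the generation; refresh and retry,
            # as the report client does in the log above.
            continue
    raise RuntimeError("could not update inventory for %s" % rp_uuid)

Once a retry wins, the log records the new generation ("generation from 41 to 42 during operation: update_inventory") and the tracker proceeds.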
[ 567.246398] env[63345]: DEBUG nova.scheduler.client.report [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] Refreshing inventories for resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 567.261563] env[63345]: DEBUG nova.scheduler.client.report [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] Updating ProviderTree inventory for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 567.261563] env[63345]: DEBUG nova.compute.provider_tree [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 567.276264] env[63345]: DEBUG nova.scheduler.client.report [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] Refreshing aggregate associations for resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57, aggregates: None {{(pid=63345) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 567.283149] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Acquiring lock "805f9143-a8d8-4995-a20d-3b10ef3ab599" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 567.283788] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Lock "805f9143-a8d8-4995-a20d-3b10ef3ab599" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 567.300635] env[63345]: DEBUG nova.scheduler.client.report [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] Refreshing trait associations for resource provider 
fc35ddde-c15e-4ab8-bf77-a06ae0805b57, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=63345) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 567.736539] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6a2e2d8-66be-4c7f-bb26-38bfb2975cdd {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.746208] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a72ed67f-d6e7-4b06-8734-ab41b80ea108 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.780113] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19e255d1-37b2-484e-82d2-45b789ab98ad {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.788289] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-680d4010-16d4-49c1-85a8-21d6ea0cd8b6 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.802351] env[63345]: DEBUG nova.compute.provider_tree [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 188, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 568.340085] env[63345]: DEBUG nova.scheduler.client.report [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] Updated inventory for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with generation 41 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 188, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 568.340366] env[63345]: DEBUG nova.compute.provider_tree [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] Updating resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 generation from 41 to 42 during operation: update_inventory {{(pid=63345) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 568.340550] env[63345]: DEBUG nova.compute.provider_tree [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] Updating inventory in ProviderTree for 
provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 188, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 568.850775] env[63345]: DEBUG oslo_concurrency.lockutils [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 4.384s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 568.850775] env[63345]: ERROR nova.compute.manager [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port fc3acfbf-5f06-437b-a030-e8eec8a877b8, please check neutron logs for more information. [ 568.850775] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] Traceback (most recent call last): [ 568.850775] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 568.850775] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] self.driver.spawn(context, instance, image_meta, [ 568.850775] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 568.850775] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] self._vmops.spawn(context, instance, image_meta, injected_files, [ 568.850775] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 568.850775] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] vm_ref = self.build_virtual_machine(instance, [ 568.851215] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 568.851215] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] vif_infos = vmwarevif.get_vif_info(self._session, [ 568.851215] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 568.851215] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] for vif in network_info: [ 568.851215] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 568.851215] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] return self._sync_wrapper(fn, *args, **kwargs) [ 568.851215] env[63345]: ERROR nova.compute.manager [instance: 
1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 568.851215] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] self.wait() [ 568.851215] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 568.851215] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] self[:] = self._gt.wait() [ 568.851215] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 568.851215] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] return self._exit_event.wait() [ 568.851215] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 568.851573] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] result = hub.switch() [ 568.851573] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 568.851573] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] return self.greenlet.switch() [ 568.851573] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 568.851573] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] result = function(*args, **kwargs) [ 568.851573] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 568.851573] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] return func(*args, **kwargs) [ 568.851573] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 568.851573] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] raise e [ 568.851573] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 568.851573] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] nwinfo = self.network_api.allocate_for_instance( [ 568.851573] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 568.851573] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] created_port_ids = self._update_ports_for_instance( [ 568.851929] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 568.851929] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] with excutils.save_and_reraise_exception(): [ 568.851929] env[63345]: ERROR nova.compute.manager [instance: 
1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 568.851929] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] self.force_reraise() [ 568.851929] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 568.851929] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] raise self.value [ 568.851929] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 568.851929] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] updated_port = self._update_port( [ 568.851929] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 568.851929] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] _ensure_no_port_binding_failure(port) [ 568.851929] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 568.851929] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] raise exception.PortBindingFailed(port_id=port['id']) [ 568.852267] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] nova.exception.PortBindingFailed: Binding failed for port fc3acfbf-5f06-437b-a030-e8eec8a877b8, please check neutron logs for more information. [ 568.852267] env[63345]: ERROR nova.compute.manager [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] [ 568.852267] env[63345]: DEBUG nova.compute.utils [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] Binding failed for port fc3acfbf-5f06-437b-a030-e8eec8a877b8, please check neutron logs for more information. 
{{(pid=63345) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 568.853782] env[63345]: DEBUG oslo_concurrency.lockutils [None req-55f4b490-cb76-4231-a948-849d24288fed tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.070s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 568.855254] env[63345]: INFO nova.compute.claims [None req-55f4b490-cb76-4231-a948-849d24288fed tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 568.858068] env[63345]: DEBUG nova.compute.manager [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] Build of instance 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68 was re-scheduled: Binding failed for port fc3acfbf-5f06-437b-a030-e8eec8a877b8, please check neutron logs for more information. {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 568.858539] env[63345]: DEBUG nova.compute.manager [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] Unplugging VIFs for instance {{(pid=63345) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 568.858791] env[63345]: DEBUG oslo_concurrency.lockutils [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] Acquiring lock "refresh_cache-1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 568.858940] env[63345]: DEBUG oslo_concurrency.lockutils [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] Acquired lock "refresh_cache-1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 568.859125] env[63345]: DEBUG nova.network.neutron [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 569.386708] env[63345]: DEBUG nova.network.neutron [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 569.492202] env[63345]: DEBUG nova.network.neutron [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 569.994488] env[63345]: DEBUG oslo_concurrency.lockutils [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] Releasing lock "refresh_cache-1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 569.994914] env[63345]: DEBUG nova.compute.manager [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=63345) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 569.994914] env[63345]: DEBUG nova.compute.manager [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 569.995065] env[63345]: DEBUG nova.network.neutron [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 570.015922] env[63345]: DEBUG nova.network.neutron [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 570.303998] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Acquiring lock "3e4e58bd-903b-4b3d-8be4-5678aab6c721" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 570.303998] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Lock "3e4e58bd-903b-4b3d-8be4-5678aab6c721" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 570.315222] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35a87c90-246c-45ff-8f02-a4c58e14bb43 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.322347] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb515e07-a761-4688-9c20-3d1b3518b1f4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.353801] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9fb7929-515b-49eb-b1f7-8f93f7cbe920 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.366848] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b5143d9-447d-4746-aa51-de5f9d7ac555 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.384620] env[63345]: DEBUG nova.compute.provider_tree [None req-55f4b490-cb76-4231-a948-849d24288fed tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 570.518083] env[63345]: DEBUG nova.network.neutron [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 570.854542] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Acquiring lock "e6bc8cb9-2f1a-49cb-974d-ea9a211126ee" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 570.854851] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 
tempest-ListServerFiltersTestJSON-10594905-project-member] Lock "e6bc8cb9-2f1a-49cb-974d-ea9a211126ee" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 570.887309] env[63345]: DEBUG nova.scheduler.client.report [None req-55f4b490-cb76-4231-a948-849d24288fed tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 188, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 570.997050] env[63345]: DEBUG oslo_concurrency.lockutils [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Acquiring lock "7bef089c-e93b-4ba6-a683-4e076489f92a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 570.997299] env[63345]: DEBUG oslo_concurrency.lockutils [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Lock "7bef089c-e93b-4ba6-a683-4e076489f92a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 571.020978] env[63345]: INFO nova.compute.manager [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] [instance: 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68] Took 1.03 seconds to deallocate network for instance. [ 571.394980] env[63345]: DEBUG oslo_concurrency.lockutils [None req-55f4b490-cb76-4231-a948-849d24288fed tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.541s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 571.395542] env[63345]: DEBUG nova.compute.manager [None req-55f4b490-cb76-4231-a948-849d24288fed tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] Start building networks asynchronously for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 571.398548] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a097932b-60f9-4384-b1fe-e2686fbffa04 tempest-ServersAdminNegativeTestJSON-1806261786 tempest-ServersAdminNegativeTestJSON-1806261786-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 26.074s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 571.560330] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Acquiring lock "0d5cb238-2d25-47b1-8ce6-15a20836dbfb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 571.561017] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Lock "0d5cb238-2d25-47b1-8ce6-15a20836dbfb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 571.755251] env[63345]: DEBUG oslo_concurrency.lockutils [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Acquiring lock "c07c7f5d-a674-458f-8253-1bc2d61be6c1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 571.755402] env[63345]: DEBUG oslo_concurrency.lockutils [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Lock "c07c7f5d-a674-458f-8253-1bc2d61be6c1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 571.905131] env[63345]: DEBUG nova.compute.utils [None req-55f4b490-cb76-4231-a948-849d24288fed tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 571.913078] env[63345]: DEBUG nova.compute.manager [None req-55f4b490-cb76-4231-a948-849d24288fed tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] Allocating IP information in the background. 
{{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 571.913078] env[63345]: DEBUG nova.network.neutron [None req-55f4b490-cb76-4231-a948-849d24288fed tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 572.001172] env[63345]: DEBUG nova.policy [None req-55f4b490-cb76-4231-a948-849d24288fed tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f23f5478fa3447419476495cda21568e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cffff9ec3b5b4bf8b400039c7d461ed8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 572.053134] env[63345]: INFO nova.scheduler.client.report [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] Deleted allocations for instance 1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68 [ 572.421647] env[63345]: DEBUG nova.compute.manager [None req-55f4b490-cb76-4231-a948-849d24288fed tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] Start building block device mappings for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 572.475066] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28220512-5d3a-4732-b591-d115d58833a0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.484421] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c03e2f4d-98bd-47af-bf31-a3658a09b20c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.525022] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fc71d6e-dc7f-4d6d-befb-eb80c9f0554e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.527341] env[63345]: DEBUG nova.network.neutron [None req-55f4b490-cb76-4231-a948-849d24288fed tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] Successfully created port: 1072c48c-5de1-46dc-a4ec-1846ce775d97 {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 572.534458] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c8fc3b2-bec9-4484-b1ab-d980bf053d46 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.548944] env[63345]: DEBUG nova.compute.provider_tree [None req-a097932b-60f9-4384-b1fe-e2686fbffa04 tempest-ServersAdminNegativeTestJSON-1806261786 
tempest-ServersAdminNegativeTestJSON-1806261786-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 572.564991] env[63345]: DEBUG oslo_concurrency.lockutils [None req-facd6b05-0ee1-4bc1-a385-375096868ef4 tempest-InstanceActionsTestJSON-1408181625 tempest-InstanceActionsTestJSON-1408181625-project-member] Lock "1f8086d2-bd02-4ed8-a0ba-9fba2cf77f68" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 53.037s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 573.054421] env[63345]: DEBUG nova.scheduler.client.report [None req-a097932b-60f9-4384-b1fe-e2686fbffa04 tempest-ServersAdminNegativeTestJSON-1806261786 tempest-ServersAdminNegativeTestJSON-1806261786-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 188, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 573.070327] env[63345]: DEBUG nova.compute.manager [None req-994a51d0-00b8-426a-996a-32235561cbcb tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 573.435446] env[63345]: DEBUG nova.compute.manager [None req-55f4b490-cb76-4231-a948-849d24288fed tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] Start spawning the instance on the hypervisor. 
{{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 573.466639] env[63345]: DEBUG nova.virt.hardware [None req-55f4b490-cb76-4231-a948-849d24288fed tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 573.466639] env[63345]: DEBUG nova.virt.hardware [None req-55f4b490-cb76-4231-a948-849d24288fed tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 573.466639] env[63345]: DEBUG nova.virt.hardware [None req-55f4b490-cb76-4231-a948-849d24288fed tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 573.466840] env[63345]: DEBUG nova.virt.hardware [None req-55f4b490-cb76-4231-a948-849d24288fed tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 573.466840] env[63345]: DEBUG nova.virt.hardware [None req-55f4b490-cb76-4231-a948-849d24288fed tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 573.467986] env[63345]: DEBUG nova.virt.hardware [None req-55f4b490-cb76-4231-a948-849d24288fed tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 573.468272] env[63345]: DEBUG nova.virt.hardware [None req-55f4b490-cb76-4231-a948-849d24288fed tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 573.468523] env[63345]: DEBUG nova.virt.hardware [None req-55f4b490-cb76-4231-a948-849d24288fed tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 573.468772] 
env[63345]: DEBUG nova.virt.hardware [None req-55f4b490-cb76-4231-a948-849d24288fed tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 573.468953] env[63345]: DEBUG nova.virt.hardware [None req-55f4b490-cb76-4231-a948-849d24288fed tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 573.469151] env[63345]: DEBUG nova.virt.hardware [None req-55f4b490-cb76-4231-a948-849d24288fed tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 573.470046] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e79797b-affc-4030-b39a-54bc788ea879 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.482031] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7698c246-3af1-4b57-8eea-6a9fccb66c6f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.564242] env[63345]: DEBUG nova.compute.manager [req-ed90c48b-d1eb-4c18-90b2-0e9aee8d576e req-964b7ced-0fee-4829-8128-a8d01f528e52 service nova] [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] Received event network-changed-1072c48c-5de1-46dc-a4ec-1846ce775d97 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 573.564438] env[63345]: DEBUG nova.compute.manager [req-ed90c48b-d1eb-4c18-90b2-0e9aee8d576e req-964b7ced-0fee-4829-8128-a8d01f528e52 service nova] [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] Refreshing instance network info cache due to event network-changed-1072c48c-5de1-46dc-a4ec-1846ce775d97. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 573.564652] env[63345]: DEBUG oslo_concurrency.lockutils [req-ed90c48b-d1eb-4c18-90b2-0e9aee8d576e req-964b7ced-0fee-4829-8128-a8d01f528e52 service nova] Acquiring lock "refresh_cache-6e8ef6b9-4684-4685-949a-2e2868aa3fb7" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 573.564795] env[63345]: DEBUG oslo_concurrency.lockutils [req-ed90c48b-d1eb-4c18-90b2-0e9aee8d576e req-964b7ced-0fee-4829-8128-a8d01f528e52 service nova] Acquired lock "refresh_cache-6e8ef6b9-4684-4685-949a-2e2868aa3fb7" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 573.565195] env[63345]: DEBUG nova.network.neutron [req-ed90c48b-d1eb-4c18-90b2-0e9aee8d576e req-964b7ced-0fee-4829-8128-a8d01f528e52 service nova] [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] Refreshing network info cache for port 1072c48c-5de1-46dc-a4ec-1846ce775d97 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 573.566921] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a097932b-60f9-4384-b1fe-e2686fbffa04 tempest-ServersAdminNegativeTestJSON-1806261786 tempest-ServersAdminNegativeTestJSON-1806261786-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.168s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 573.567561] env[63345]: ERROR nova.compute.manager [None req-a097932b-60f9-4384-b1fe-e2686fbffa04 tempest-ServersAdminNegativeTestJSON-1806261786 tempest-ServersAdminNegativeTestJSON-1806261786-project-member] [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port ca7e4f78-240a-446f-ad69-0e78b3d99ee7, please check neutron logs for more information. 
[ 573.567561] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] Traceback (most recent call last): [ 573.567561] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 573.567561] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] self.driver.spawn(context, instance, image_meta, [ 573.567561] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 573.567561] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 573.567561] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 573.567561] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] vm_ref = self.build_virtual_machine(instance, [ 573.567561] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 573.567561] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] vif_infos = vmwarevif.get_vif_info(self._session, [ 573.567561] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 573.568114] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] for vif in network_info: [ 573.568114] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 573.568114] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] return self._sync_wrapper(fn, *args, **kwargs) [ 573.568114] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 573.568114] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] self.wait() [ 573.568114] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 573.568114] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] self[:] = self._gt.wait() [ 573.568114] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 573.568114] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] return self._exit_event.wait() [ 573.568114] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 573.568114] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] result = hub.switch() [ 573.568114] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
573.568114] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] return self.greenlet.switch() [ 573.568717] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 573.568717] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] result = function(*args, **kwargs) [ 573.568717] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 573.568717] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] return func(*args, **kwargs) [ 573.568717] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 573.568717] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] raise e [ 573.568717] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 573.568717] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] nwinfo = self.network_api.allocate_for_instance( [ 573.568717] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 573.568717] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] created_port_ids = self._update_ports_for_instance( [ 573.568717] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 573.568717] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] with excutils.save_and_reraise_exception(): [ 573.568717] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 573.569363] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] self.force_reraise() [ 573.569363] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 573.569363] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] raise self.value [ 573.569363] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 573.569363] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] updated_port = self._update_port( [ 573.569363] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 573.569363] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] _ensure_no_port_binding_failure(port) [ 573.569363] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 573.569363] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] raise exception.PortBindingFailed(port_id=port['id']) [ 573.569363] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] nova.exception.PortBindingFailed: Binding failed for port ca7e4f78-240a-446f-ad69-0e78b3d99ee7, please check neutron logs for more information. [ 573.569363] env[63345]: ERROR nova.compute.manager [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] [ 573.571261] env[63345]: DEBUG nova.compute.utils [None req-a097932b-60f9-4384-b1fe-e2686fbffa04 tempest-ServersAdminNegativeTestJSON-1806261786 tempest-ServersAdminNegativeTestJSON-1806261786-project-member] [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] Binding failed for port ca7e4f78-240a-446f-ad69-0e78b3d99ee7, please check neutron logs for more information. {{(pid=63345) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 573.572539] env[63345]: DEBUG oslo_concurrency.lockutils [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.524s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 573.573925] env[63345]: INFO nova.compute.claims [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] [instance: 1f595aef-799f-4ca4-be91-e95ef056926c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 573.578973] env[63345]: DEBUG nova.compute.manager [None req-a097932b-60f9-4384-b1fe-e2686fbffa04 tempest-ServersAdminNegativeTestJSON-1806261786 tempest-ServersAdminNegativeTestJSON-1806261786-project-member] [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] Build of instance cec6ec60-5e8a-4c31-ba75-001f3c1980f0 was re-scheduled: Binding failed for port ca7e4f78-240a-446f-ad69-0e78b3d99ee7, please check neutron logs for more information. 
{{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 573.579755] env[63345]: DEBUG nova.compute.manager [None req-a097932b-60f9-4384-b1fe-e2686fbffa04 tempest-ServersAdminNegativeTestJSON-1806261786 tempest-ServersAdminNegativeTestJSON-1806261786-project-member] [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] Unplugging VIFs for instance {{(pid=63345) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 573.580670] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a097932b-60f9-4384-b1fe-e2686fbffa04 tempest-ServersAdminNegativeTestJSON-1806261786 tempest-ServersAdminNegativeTestJSON-1806261786-project-member] Acquiring lock "refresh_cache-cec6ec60-5e8a-4c31-ba75-001f3c1980f0" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 573.580670] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a097932b-60f9-4384-b1fe-e2686fbffa04 tempest-ServersAdminNegativeTestJSON-1806261786 tempest-ServersAdminNegativeTestJSON-1806261786-project-member] Acquired lock "refresh_cache-cec6ec60-5e8a-4c31-ba75-001f3c1980f0" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 573.580670] env[63345]: DEBUG nova.network.neutron [None req-a097932b-60f9-4384-b1fe-e2686fbffa04 tempest-ServersAdminNegativeTestJSON-1806261786 tempest-ServersAdminNegativeTestJSON-1806261786-project-member] [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 573.601472] env[63345]: ERROR nova.compute.manager [None req-55f4b490-cb76-4231-a948-849d24288fed tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 1072c48c-5de1-46dc-a4ec-1846ce775d97, please check neutron logs for more information. 
[ 573.601472] env[63345]: ERROR nova.compute.manager Traceback (most recent call last): [ 573.601472] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 573.601472] env[63345]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 573.601472] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 573.601472] env[63345]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 573.601472] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 573.601472] env[63345]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 573.601472] env[63345]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 573.601472] env[63345]: ERROR nova.compute.manager self.force_reraise() [ 573.601472] env[63345]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 573.601472] env[63345]: ERROR nova.compute.manager raise self.value [ 573.601472] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 573.601472] env[63345]: ERROR nova.compute.manager updated_port = self._update_port( [ 573.601472] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 573.601472] env[63345]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 573.602030] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 573.602030] env[63345]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 573.602030] env[63345]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 1072c48c-5de1-46dc-a4ec-1846ce775d97, please check neutron logs for more information. 
[ 573.602030] env[63345]: ERROR nova.compute.manager [ 573.602030] env[63345]: Traceback (most recent call last): [ 573.602030] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 573.602030] env[63345]: listener.cb(fileno) [ 573.602030] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 573.602030] env[63345]: result = function(*args, **kwargs) [ 573.602030] env[63345]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 573.602030] env[63345]: return func(*args, **kwargs) [ 573.602030] env[63345]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 573.602030] env[63345]: raise e [ 573.602030] env[63345]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 573.602030] env[63345]: nwinfo = self.network_api.allocate_for_instance( [ 573.602030] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 573.602030] env[63345]: created_port_ids = self._update_ports_for_instance( [ 573.602030] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 573.602030] env[63345]: with excutils.save_and_reraise_exception(): [ 573.602030] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 573.602030] env[63345]: self.force_reraise() [ 573.602030] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 573.602030] env[63345]: raise self.value [ 573.602030] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 573.602030] env[63345]: updated_port = self._update_port( [ 573.602030] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 573.602030] env[63345]: _ensure_no_port_binding_failure(port) [ 573.602030] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 573.602030] env[63345]: raise exception.PortBindingFailed(port_id=port['id']) [ 573.602850] env[63345]: nova.exception.PortBindingFailed: Binding failed for port 1072c48c-5de1-46dc-a4ec-1846ce775d97, please check neutron logs for more information. [ 573.602850] env[63345]: Removing descriptor: 16 [ 573.602850] env[63345]: ERROR nova.compute.manager [None req-55f4b490-cb76-4231-a948-849d24288fed tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 1072c48c-5de1-46dc-a4ec-1846ce775d97, please check neutron logs for more information. 
[ 573.602850] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] Traceback (most recent call last): [ 573.602850] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 573.602850] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] yield resources [ 573.602850] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 573.602850] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] self.driver.spawn(context, instance, image_meta, [ 573.602850] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 573.602850] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 573.602850] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 573.602850] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] vm_ref = self.build_virtual_machine(instance, [ 573.603203] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 573.603203] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] vif_infos = vmwarevif.get_vif_info(self._session, [ 573.603203] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 573.603203] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] for vif in network_info: [ 573.603203] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 573.603203] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] return self._sync_wrapper(fn, *args, **kwargs) [ 573.603203] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 573.603203] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] self.wait() [ 573.603203] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 573.603203] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] self[:] = self._gt.wait() [ 573.603203] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 573.603203] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] return self._exit_event.wait() [ 573.603203] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 573.603576] env[63345]: ERROR 
nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] result = hub.switch() [ 573.603576] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 573.603576] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] return self.greenlet.switch() [ 573.603576] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 573.603576] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] result = function(*args, **kwargs) [ 573.603576] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 573.603576] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] return func(*args, **kwargs) [ 573.603576] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 573.603576] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] raise e [ 573.603576] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 573.603576] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] nwinfo = self.network_api.allocate_for_instance( [ 573.603576] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 573.603576] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] created_port_ids = self._update_ports_for_instance( [ 573.604026] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 573.604026] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] with excutils.save_and_reraise_exception(): [ 573.604026] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 573.604026] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] self.force_reraise() [ 573.604026] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 573.604026] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] raise self.value [ 573.604026] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 573.604026] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] updated_port = self._update_port( [ 573.604026] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 573.604026] 
env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] _ensure_no_port_binding_failure(port) [ 573.604026] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 573.604026] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] raise exception.PortBindingFailed(port_id=port['id']) [ 573.604728] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] nova.exception.PortBindingFailed: Binding failed for port 1072c48c-5de1-46dc-a4ec-1846ce775d97, please check neutron logs for more information. [ 573.604728] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] [ 573.604728] env[63345]: INFO nova.compute.manager [None req-55f4b490-cb76-4231-a948-849d24288fed tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] Terminating instance [ 573.605278] env[63345]: DEBUG oslo_concurrency.lockutils [None req-994a51d0-00b8-426a-996a-32235561cbcb tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 574.099524] env[63345]: DEBUG nova.network.neutron [req-ed90c48b-d1eb-4c18-90b2-0e9aee8d576e req-964b7ced-0fee-4829-8128-a8d01f528e52 service nova] [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 574.107592] env[63345]: DEBUG oslo_concurrency.lockutils [None req-55f4b490-cb76-4231-a948-849d24288fed tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Acquiring lock "refresh_cache-6e8ef6b9-4684-4685-949a-2e2868aa3fb7" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 574.121942] env[63345]: DEBUG nova.network.neutron [None req-a097932b-60f9-4384-b1fe-e2686fbffa04 tempest-ServersAdminNegativeTestJSON-1806261786 tempest-ServersAdminNegativeTestJSON-1806261786-project-member] [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 574.178163] env[63345]: DEBUG nova.network.neutron [req-ed90c48b-d1eb-4c18-90b2-0e9aee8d576e req-964b7ced-0fee-4829-8128-a8d01f528e52 service nova] [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 574.282357] env[63345]: DEBUG nova.network.neutron [None req-a097932b-60f9-4384-b1fe-e2686fbffa04 tempest-ServersAdminNegativeTestJSON-1806261786 tempest-ServersAdminNegativeTestJSON-1806261786-project-member] [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 574.681165] env[63345]: DEBUG oslo_concurrency.lockutils [req-ed90c48b-d1eb-4c18-90b2-0e9aee8d576e req-964b7ced-0fee-4829-8128-a8d01f528e52 service nova] Releasing lock "refresh_cache-6e8ef6b9-4684-4685-949a-2e2868aa3fb7" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 574.681581] env[63345]: DEBUG oslo_concurrency.lockutils [None req-55f4b490-cb76-4231-a948-849d24288fed tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Acquired lock "refresh_cache-6e8ef6b9-4684-4685-949a-2e2868aa3fb7" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 574.681770] env[63345]: DEBUG nova.network.neutron [None req-55f4b490-cb76-4231-a948-849d24288fed tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 574.783554] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a097932b-60f9-4384-b1fe-e2686fbffa04 tempest-ServersAdminNegativeTestJSON-1806261786 tempest-ServersAdminNegativeTestJSON-1806261786-project-member] Releasing lock "refresh_cache-cec6ec60-5e8a-4c31-ba75-001f3c1980f0" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 574.783811] env[63345]: DEBUG nova.compute.manager [None req-a097932b-60f9-4384-b1fe-e2686fbffa04 tempest-ServersAdminNegativeTestJSON-1806261786 tempest-ServersAdminNegativeTestJSON-1806261786-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=63345) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 574.784102] env[63345]: DEBUG nova.compute.manager [None req-a097932b-60f9-4384-b1fe-e2686fbffa04 tempest-ServersAdminNegativeTestJSON-1806261786 tempest-ServersAdminNegativeTestJSON-1806261786-project-member] [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 574.784246] env[63345]: DEBUG nova.network.neutron [None req-a097932b-60f9-4384-b1fe-e2686fbffa04 tempest-ServersAdminNegativeTestJSON-1806261786 tempest-ServersAdminNegativeTestJSON-1806261786-project-member] [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 574.804323] env[63345]: DEBUG nova.network.neutron [None req-a097932b-60f9-4384-b1fe-e2686fbffa04 tempest-ServersAdminNegativeTestJSON-1806261786 tempest-ServersAdminNegativeTestJSON-1806261786-project-member] [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 575.090573] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-487e1333-d118-472e-983f-3683f644beca {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.102245] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c54fede3-08e3-4ea2-ac6a-d6ec0c858501 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.146681] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f5f0c0d-4db4-424f-9107-ebc07c3b1ae5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.154550] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e85db7da-66ac-4a47-b66e-63ef98e1a8b5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.168355] env[63345]: DEBUG nova.compute.provider_tree [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 575.207300] env[63345]: DEBUG nova.network.neutron [None req-55f4b490-cb76-4231-a948-849d24288fed tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 575.281583] env[63345]: DEBUG nova.network.neutron [None req-55f4b490-cb76-4231-a948-849d24288fed tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 575.308795] env[63345]: DEBUG nova.network.neutron [None req-a097932b-60f9-4384-b1fe-e2686fbffa04 tempest-ServersAdminNegativeTestJSON-1806261786 tempest-ServersAdminNegativeTestJSON-1806261786-project-member] [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 575.553836] env[63345]: DEBUG oslo_concurrency.lockutils [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Acquiring lock "f043239f-7158-4199-a784-d711a5a301be" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 575.554025] env[63345]: DEBUG oslo_concurrency.lockutils [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Lock "f043239f-7158-4199-a784-d711a5a301be" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 575.593580] env[63345]: DEBUG nova.compute.manager [req-5b99aafa-66c3-498f-9ed2-40b9b02c07d2 req-16fd51a4-23ae-4f42-870a-01c7491f0e45 service nova] [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] Received event network-vif-deleted-1072c48c-5de1-46dc-a4ec-1846ce775d97 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 575.676058] env[63345]: DEBUG nova.scheduler.client.report [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 188, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 575.786687] env[63345]: DEBUG oslo_concurrency.lockutils [None req-55f4b490-cb76-4231-a948-849d24288fed tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Releasing lock "refresh_cache-6e8ef6b9-4684-4685-949a-2e2868aa3fb7" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 575.787185] env[63345]: DEBUG nova.compute.manager [None req-55f4b490-cb76-4231-a948-849d24288fed tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] 
[instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] Start destroying the instance on the hypervisor. {{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 575.787384] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-55f4b490-cb76-4231-a948-849d24288fed tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 575.787688] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3359a1e8-e94d-4cd4-837a-80cdfbb09f3c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.796906] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9673cb97-3ea9-44ba-9a32-4d8131f905f4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.812332] env[63345]: INFO nova.compute.manager [None req-a097932b-60f9-4384-b1fe-e2686fbffa04 tempest-ServersAdminNegativeTestJSON-1806261786 tempest-ServersAdminNegativeTestJSON-1806261786-project-member] [instance: cec6ec60-5e8a-4c31-ba75-001f3c1980f0] Took 1.03 seconds to deallocate network for instance. [ 575.819724] env[63345]: WARNING nova.virt.vmwareapi.vmops [None req-55f4b490-cb76-4231-a948-849d24288fed tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6e8ef6b9-4684-4685-949a-2e2868aa3fb7 could not be found. [ 575.819949] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-55f4b490-cb76-4231-a948-849d24288fed tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 575.820143] env[63345]: INFO nova.compute.manager [None req-55f4b490-cb76-4231-a948-849d24288fed tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] Took 0.03 seconds to destroy the instance on the hypervisor. [ 575.820380] env[63345]: DEBUG oslo.service.loopingcall [None req-55f4b490-cb76-4231-a948-849d24288fed tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 575.820581] env[63345]: DEBUG nova.compute.manager [-] [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 575.820677] env[63345]: DEBUG nova.network.neutron [-] [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 575.843308] env[63345]: DEBUG nova.network.neutron [-] [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 576.182943] env[63345]: DEBUG oslo_concurrency.lockutils [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.610s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 576.183760] env[63345]: DEBUG nova.compute.manager [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] [instance: 1f595aef-799f-4ca4-be91-e95ef056926c] Start building networks asynchronously for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 576.186392] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8eb6a8e8-18a6-4bca-bb00-3bbb78f015ed tempest-FloatingIPsAssociationTestJSON-967064990 tempest-FloatingIPsAssociationTestJSON-967064990-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 26.730s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 576.346528] env[63345]: DEBUG nova.network.neutron [-] [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 576.692574] env[63345]: DEBUG nova.compute.utils [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 576.696413] env[63345]: DEBUG nova.compute.manager [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] [instance: 1f595aef-799f-4ca4-be91-e95ef056926c] Not allocating networking since 'none' was specified. {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1983}} [ 576.841660] env[63345]: INFO nova.scheduler.client.report [None req-a097932b-60f9-4384-b1fe-e2686fbffa04 tempest-ServersAdminNegativeTestJSON-1806261786 tempest-ServersAdminNegativeTestJSON-1806261786-project-member] Deleted allocations for instance cec6ec60-5e8a-4c31-ba75-001f3c1980f0 [ 576.850020] env[63345]: INFO nova.compute.manager [-] [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] Took 1.03 seconds to deallocate network for instance. 
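The "Inventory has not changed" reports above repeat the provider's full inventory payload. As a minimal illustrative sketch (not part of the captured log), assuming placement's usual capacity rule of (total - reserved) * allocation_ratio, with max_unit bounding any single allocation, the figures shown in those payloads work out as follows:

# Illustrative only: derive usable capacity from the inventory data logged above,
# assuming capacity = (total - reserved) * allocation_ratio and that max_unit
# caps any single allocation. Values are copied verbatim from the log.
inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
             'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                  'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 188,
                'step_size': 1, 'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: schedulable={capacity:.0f}, max single allocation={inv['max_unit']}")

Under that assumption the logged values give roughly 192 schedulable VCPUs, 196078 MB of RAM and 400 GB of disk, consistent with the 48-core host the resource tracker is reporting against provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57.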
[ 576.854960] env[63345]: DEBUG nova.compute.claims [None req-55f4b490-cb76-4231-a948-849d24288fed tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] Aborting claim: {{(pid=63345) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 576.855184] env[63345]: DEBUG oslo_concurrency.lockutils [None req-55f4b490-cb76-4231-a948-849d24288fed tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 577.167818] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff5ccdc3-2811-4700-add2-fe36a3fffdb1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.176435] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b709d8f9-d604-4042-8b6d-9e6c2b5d297c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.207561] env[63345]: DEBUG nova.compute.manager [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] [instance: 1f595aef-799f-4ca4-be91-e95ef056926c] Start building block device mappings for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 577.210756] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e851fb2-0823-4ff6-b2ac-e4a4e2ae2fc8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.218641] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab3b125d-3240-40bf-a128-d1908781c349 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.232423] env[63345]: DEBUG nova.compute.provider_tree [None req-8eb6a8e8-18a6-4bca-bb00-3bbb78f015ed tempest-FloatingIPsAssociationTestJSON-967064990 tempest-FloatingIPsAssociationTestJSON-967064990-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 577.355236] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a097932b-60f9-4384-b1fe-e2686fbffa04 tempest-ServersAdminNegativeTestJSON-1806261786 tempest-ServersAdminNegativeTestJSON-1806261786-project-member] Lock "cec6ec60-5e8a-4c31-ba75-001f3c1980f0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 57.831s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 577.736652] env[63345]: DEBUG nova.scheduler.client.report [None req-8eb6a8e8-18a6-4bca-bb00-3bbb78f015ed tempest-FloatingIPsAssociationTestJSON-967064990 tempest-FloatingIPsAssociationTestJSON-967064990-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': 
{'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 188, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 577.858530] env[63345]: DEBUG nova.compute.manager [None req-230142a3-84f1-43a5-9eb9-4bad4e3f22a1 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 578.223753] env[63345]: DEBUG nova.compute.manager [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] [instance: 1f595aef-799f-4ca4-be91-e95ef056926c] Start spawning the instance on the hypervisor. {{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 578.246187] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8eb6a8e8-18a6-4bca-bb00-3bbb78f015ed tempest-FloatingIPsAssociationTestJSON-967064990 tempest-FloatingIPsAssociationTestJSON-967064990-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.060s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 578.247154] env[63345]: ERROR nova.compute.manager [None req-8eb6a8e8-18a6-4bca-bb00-3bbb78f015ed tempest-FloatingIPsAssociationTestJSON-967064990 tempest-FloatingIPsAssociationTestJSON-967064990-project-member] [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port b0c5ad0c-5682-4e54-b4c7-916cd8074721, please check neutron logs for more information. 
[ 578.247154] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] Traceback (most recent call last): [ 578.247154] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 578.247154] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] self.driver.spawn(context, instance, image_meta, [ 578.247154] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 578.247154] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] self._vmops.spawn(context, instance, image_meta, injected_files, [ 578.247154] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 578.247154] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] vm_ref = self.build_virtual_machine(instance, [ 578.247154] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 578.247154] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] vif_infos = vmwarevif.get_vif_info(self._session, [ 578.247154] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 578.247477] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] for vif in network_info: [ 578.247477] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 578.247477] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] return self._sync_wrapper(fn, *args, **kwargs) [ 578.247477] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 578.247477] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] self.wait() [ 578.247477] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 578.247477] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] self[:] = self._gt.wait() [ 578.247477] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 578.247477] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] return self._exit_event.wait() [ 578.247477] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 578.247477] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] result = hub.switch() [ 578.247477] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
578.247477] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] return self.greenlet.switch() [ 578.247832] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 578.247832] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] result = function(*args, **kwargs) [ 578.247832] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 578.247832] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] return func(*args, **kwargs) [ 578.247832] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 578.247832] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] raise e [ 578.247832] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 578.247832] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] nwinfo = self.network_api.allocate_for_instance( [ 578.247832] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 578.247832] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] created_port_ids = self._update_ports_for_instance( [ 578.247832] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 578.247832] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] with excutils.save_and_reraise_exception(): [ 578.247832] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 578.248196] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] self.force_reraise() [ 578.248196] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 578.248196] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] raise self.value [ 578.248196] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 578.248196] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] updated_port = self._update_port( [ 578.248196] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 578.248196] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] _ensure_no_port_binding_failure(port) [ 578.248196] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 578.248196] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] raise exception.PortBindingFailed(port_id=port['id']) [ 578.248196] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] nova.exception.PortBindingFailed: Binding failed for port b0c5ad0c-5682-4e54-b4c7-916cd8074721, please check neutron logs for more information. [ 578.248196] env[63345]: ERROR nova.compute.manager [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] [ 578.248500] env[63345]: DEBUG nova.compute.utils [None req-8eb6a8e8-18a6-4bca-bb00-3bbb78f015ed tempest-FloatingIPsAssociationTestJSON-967064990 tempest-FloatingIPsAssociationTestJSON-967064990-project-member] [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] Binding failed for port b0c5ad0c-5682-4e54-b4c7-916cd8074721, please check neutron logs for more information. {{(pid=63345) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 578.251187] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0ec822aa-b63d-49c1-abe1-705df47e5c43 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.038s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 578.251397] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0ec822aa-b63d-49c1-abe1-705df47e5c43 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 578.253519] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8a6c5bc0-caa6-4dbe-a700-0852231dd95b tempest-ServerExternalEventsTest-203609284 tempest-ServerExternalEventsTest-203609284-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.835s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 578.255023] env[63345]: INFO nova.compute.claims [None req-8a6c5bc0-caa6-4dbe-a700-0852231dd95b tempest-ServerExternalEventsTest-203609284 tempest-ServerExternalEventsTest-203609284-project-member] [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 578.261162] env[63345]: DEBUG nova.compute.manager [None req-8eb6a8e8-18a6-4bca-bb00-3bbb78f015ed tempest-FloatingIPsAssociationTestJSON-967064990 tempest-FloatingIPsAssociationTestJSON-967064990-project-member] [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] Build of instance 64c4c933-2b89-409a-9b4c-eccc7f481b67 was re-scheduled: Binding failed for port b0c5ad0c-5682-4e54-b4c7-916cd8074721, please check neutron logs for more information. 
{{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 578.261162] env[63345]: DEBUG nova.compute.manager [None req-8eb6a8e8-18a6-4bca-bb00-3bbb78f015ed tempest-FloatingIPsAssociationTestJSON-967064990 tempest-FloatingIPsAssociationTestJSON-967064990-project-member] [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] Unplugging VIFs for instance {{(pid=63345) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 578.261162] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8eb6a8e8-18a6-4bca-bb00-3bbb78f015ed tempest-FloatingIPsAssociationTestJSON-967064990 tempest-FloatingIPsAssociationTestJSON-967064990-project-member] Acquiring lock "refresh_cache-64c4c933-2b89-409a-9b4c-eccc7f481b67" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 578.261301] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8eb6a8e8-18a6-4bca-bb00-3bbb78f015ed tempest-FloatingIPsAssociationTestJSON-967064990 tempest-FloatingIPsAssociationTestJSON-967064990-project-member] Acquired lock "refresh_cache-64c4c933-2b89-409a-9b4c-eccc7f481b67" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 578.261595] env[63345]: DEBUG nova.network.neutron [None req-8eb6a8e8-18a6-4bca-bb00-3bbb78f015ed tempest-FloatingIPsAssociationTestJSON-967064990 tempest-FloatingIPsAssociationTestJSON-967064990-project-member] [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 578.265945] env[63345]: DEBUG nova.virt.hardware [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 578.265945] env[63345]: DEBUG nova.virt.hardware [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 578.265945] env[63345]: DEBUG nova.virt.hardware [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 578.266437] env[63345]: DEBUG nova.virt.hardware [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Flavor pref 0:0:0 {{(pid=63345) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 578.266437] env[63345]: DEBUG nova.virt.hardware [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 578.266437] env[63345]: DEBUG nova.virt.hardware [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 578.266437] env[63345]: DEBUG nova.virt.hardware [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 578.266437] env[63345]: DEBUG nova.virt.hardware [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 578.266614] env[63345]: DEBUG nova.virt.hardware [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 578.266614] env[63345]: DEBUG nova.virt.hardware [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 578.266931] env[63345]: DEBUG nova.virt.hardware [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 578.267617] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e57d80c-e7c9-47a1-ba20-0bf8326500f6 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.276107] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18aa003f-a266-4fc6-9c17-ddb55b9216d3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.292435] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] [instance: 1f595aef-799f-4ca4-be91-e95ef056926c] Instance VIF info [] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 578.298167] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 
tempest-ServersAaction247Test-274552655-project-member] Creating folder: Project (fadf3865a4dc42beb3d90d476bd54e2d). Parent ref: group-v225918. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 578.300027] env[63345]: INFO nova.scheduler.client.report [None req-0ec822aa-b63d-49c1-abe1-705df47e5c43 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Deleted allocations for instance 51d6db80-9d1f-4e38-a564-f587474f6294 [ 578.303182] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-82684a36-1b6e-413c-a881-bd0e1f0bbb06 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.316466] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Created folder: Project (fadf3865a4dc42beb3d90d476bd54e2d) in parent group-v225918. [ 578.316880] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Creating folder: Instances. Parent ref: group-v225931. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 578.318263] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-db0f8fe8-ec52-43cd-8c80-faceecf78054 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.326116] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Created folder: Instances in parent group-v225931. [ 578.326617] env[63345]: DEBUG oslo.service.loopingcall [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 578.326617] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1f595aef-799f-4ca4-be91-e95ef056926c] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 578.326759] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-888eefca-7b8d-46fd-ae55-e6b064bc5652 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.342720] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 578.342720] env[63345]: value = "task-1016667" [ 578.342720] env[63345]: _type = "Task" [ 578.342720] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 578.350399] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016667, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 578.378624] env[63345]: DEBUG oslo_concurrency.lockutils [None req-230142a3-84f1-43a5-9eb9-4bad4e3f22a1 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 578.780954] env[63345]: DEBUG nova.network.neutron [None req-8eb6a8e8-18a6-4bca-bb00-3bbb78f015ed tempest-FloatingIPsAssociationTestJSON-967064990 tempest-FloatingIPsAssociationTestJSON-967064990-project-member] [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 578.812856] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0ec822aa-b63d-49c1-abe1-705df47e5c43 tempest-ServerDiagnosticsV248Test-1678394502 tempest-ServerDiagnosticsV248Test-1678394502-project-member] Lock "51d6db80-9d1f-4e38-a564-f587474f6294" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.050s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 578.856017] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016667, 'name': CreateVM_Task, 'duration_secs': 0.241511} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 578.856114] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1f595aef-799f-4ca4-be91-e95ef056926c] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 578.856677] env[63345]: DEBUG oslo_concurrency.lockutils [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 578.856872] env[63345]: DEBUG oslo_concurrency.lockutils [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 578.857239] env[63345]: DEBUG oslo_concurrency.lockutils [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 578.857500] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1c28e41b-238b-4fff-8352-248976ed08d2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.862928] env[63345]: DEBUG oslo_vmware.api [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Waiting for the task: (returnval){ [ 578.862928] 
env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52980603-27f1-457b-c0fc-0e3a5f1b8b61" [ 578.862928] env[63345]: _type = "Task" [ 578.862928] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 578.872478] env[63345]: DEBUG oslo_vmware.api [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52980603-27f1-457b-c0fc-0e3a5f1b8b61, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 578.882983] env[63345]: DEBUG nova.network.neutron [None req-8eb6a8e8-18a6-4bca-bb00-3bbb78f015ed tempest-FloatingIPsAssociationTestJSON-967064990 tempest-FloatingIPsAssociationTestJSON-967064990-project-member] [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 579.373975] env[63345]: DEBUG oslo_vmware.api [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52980603-27f1-457b-c0fc-0e3a5f1b8b61, 'name': SearchDatastore_Task, 'duration_secs': 0.01168} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 579.376791] env[63345]: DEBUG oslo_concurrency.lockutils [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 579.377093] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] [instance: 1f595aef-799f-4ca4-be91-e95ef056926c] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 579.377321] env[63345]: DEBUG oslo_concurrency.lockutils [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 579.377543] env[63345]: DEBUG oslo_concurrency.lockutils [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 579.377648] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Creating directory with path [datastore2] devstack-image-cache_base 
{{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 579.378088] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8fbde8cb-feab-496c-a780-f828646b7b4e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.387883] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8eb6a8e8-18a6-4bca-bb00-3bbb78f015ed tempest-FloatingIPsAssociationTestJSON-967064990 tempest-FloatingIPsAssociationTestJSON-967064990-project-member] Releasing lock "refresh_cache-64c4c933-2b89-409a-9b4c-eccc7f481b67" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 579.387883] env[63345]: DEBUG nova.compute.manager [None req-8eb6a8e8-18a6-4bca-bb00-3bbb78f015ed tempest-FloatingIPsAssociationTestJSON-967064990 tempest-FloatingIPsAssociationTestJSON-967064990-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=63345) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 579.387883] env[63345]: DEBUG nova.compute.manager [None req-8eb6a8e8-18a6-4bca-bb00-3bbb78f015ed tempest-FloatingIPsAssociationTestJSON-967064990 tempest-FloatingIPsAssociationTestJSON-967064990-project-member] [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 579.388169] env[63345]: DEBUG nova.network.neutron [None req-8eb6a8e8-18a6-4bca-bb00-3bbb78f015ed tempest-FloatingIPsAssociationTestJSON-967064990 tempest-FloatingIPsAssociationTestJSON-967064990-project-member] [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 579.392291] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 579.392291] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 579.394683] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a3ae070b-b95d-4304-9428-820f0bad1d31 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.400547] env[63345]: DEBUG oslo_vmware.api [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Waiting for the task: (returnval){ [ 579.400547] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52c7fd69-e62a-4afc-c167-4a4bab3443c5" [ 579.400547] env[63345]: _type = "Task" [ 579.400547] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 579.412199] env[63345]: DEBUG oslo_vmware.api [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52c7fd69-e62a-4afc-c167-4a4bab3443c5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 579.420696] env[63345]: DEBUG nova.network.neutron [None req-8eb6a8e8-18a6-4bca-bb00-3bbb78f015ed tempest-FloatingIPsAssociationTestJSON-967064990 tempest-FloatingIPsAssociationTestJSON-967064990-project-member] [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 579.734990] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eca13e80-34e0-4aa6-8873-7aef8109b864 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.746019] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87ecade4-8dc9-43f2-b1bb-7fb5aef0417b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.786131] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29272ae1-1015-4c0f-a0a4-c32031782c61 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.795324] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c78f2c8b-82eb-4d23-b71f-b5b130681be2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.810304] env[63345]: DEBUG nova.compute.provider_tree [None req-8a6c5bc0-caa6-4dbe-a700-0852231dd95b tempest-ServerExternalEventsTest-203609284 tempest-ServerExternalEventsTest-203609284-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 579.912653] env[63345]: DEBUG oslo_vmware.api [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52c7fd69-e62a-4afc-c167-4a4bab3443c5, 'name': SearchDatastore_Task, 'duration_secs': 0.008885} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 579.913473] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cfe7b017-6172-486f-ad54-ae556f9aca12 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.919406] env[63345]: DEBUG oslo_vmware.api [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Waiting for the task: (returnval){ [ 579.919406] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52b6ebc7-2fc5-a9c0-6366-6bc5e10b0480" [ 579.919406] env[63345]: _type = "Task" [ 579.919406] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 579.923191] env[63345]: DEBUG nova.network.neutron [None req-8eb6a8e8-18a6-4bca-bb00-3bbb78f015ed tempest-FloatingIPsAssociationTestJSON-967064990 tempest-FloatingIPsAssociationTestJSON-967064990-project-member] [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 579.927408] env[63345]: DEBUG oslo_vmware.api [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52b6ebc7-2fc5-a9c0-6366-6bc5e10b0480, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 580.313753] env[63345]: DEBUG nova.scheduler.client.report [None req-8a6c5bc0-caa6-4dbe-a700-0852231dd95b tempest-ServerExternalEventsTest-203609284 tempest-ServerExternalEventsTest-203609284-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 188, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 580.434103] env[63345]: INFO nova.compute.manager [None req-8eb6a8e8-18a6-4bca-bb00-3bbb78f015ed tempest-FloatingIPsAssociationTestJSON-967064990 tempest-FloatingIPsAssociationTestJSON-967064990-project-member] [instance: 64c4c933-2b89-409a-9b4c-eccc7f481b67] Took 1.05 seconds to deallocate network for instance. [ 580.436610] env[63345]: DEBUG oslo_vmware.api [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52b6ebc7-2fc5-a9c0-6366-6bc5e10b0480, 'name': SearchDatastore_Task, 'duration_secs': 0.008343} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 580.437082] env[63345]: DEBUG oslo_concurrency.lockutils [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 580.437360] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 1f595aef-799f-4ca4-be91-e95ef056926c/1f595aef-799f-4ca4-be91-e95ef056926c.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 580.437603] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-72bf2f64-489c-4f90-bcff-202a73b29bc8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.444963] env[63345]: DEBUG oslo_vmware.api [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Waiting for the task: (returnval){ [ 580.444963] env[63345]: value = "task-1016668" [ 580.444963] env[63345]: _type = "Task" [ 580.444963] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 580.453989] env[63345]: DEBUG oslo_vmware.api [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Task: {'id': task-1016668, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 580.823784] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8a6c5bc0-caa6-4dbe-a700-0852231dd95b tempest-ServerExternalEventsTest-203609284 tempest-ServerExternalEventsTest-203609284-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.570s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 580.824384] env[63345]: DEBUG nova.compute.manager [None req-8a6c5bc0-caa6-4dbe-a700-0852231dd95b tempest-ServerExternalEventsTest-203609284 tempest-ServerExternalEventsTest-203609284-project-member] [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] Start building networks asynchronously for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 580.827513] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fed8ccca-f158-4dc4-bc3d-351314b383ad tempest-VolumesAssistedSnapshotsTest-639887845 tempest-VolumesAssistedSnapshotsTest-639887845-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 28.856s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 580.957787] env[63345]: DEBUG oslo_vmware.api [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Task: {'id': task-1016668, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.483625} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 580.958047] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 1f595aef-799f-4ca4-be91-e95ef056926c/1f595aef-799f-4ca4-be91-e95ef056926c.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 580.958258] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] [instance: 1f595aef-799f-4ca4-be91-e95ef056926c] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 580.958501] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fc6838b7-604b-477e-9ebf-0ebec50c48c4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.964816] env[63345]: DEBUG oslo_vmware.api [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Waiting for the task: (returnval){ [ 580.964816] env[63345]: value = "task-1016669" [ 580.964816] env[63345]: _type = "Task" [ 580.964816] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 580.974247] env[63345]: DEBUG oslo_vmware.api [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Task: {'id': task-1016669, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 581.335310] env[63345]: DEBUG nova.compute.utils [None req-8a6c5bc0-caa6-4dbe-a700-0852231dd95b tempest-ServerExternalEventsTest-203609284 tempest-ServerExternalEventsTest-203609284-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 581.340630] env[63345]: DEBUG nova.compute.manager [None req-8a6c5bc0-caa6-4dbe-a700-0852231dd95b tempest-ServerExternalEventsTest-203609284 tempest-ServerExternalEventsTest-203609284-project-member] [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] Allocating IP information in the background. 
{{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 581.344138] env[63345]: DEBUG nova.network.neutron [None req-8a6c5bc0-caa6-4dbe-a700-0852231dd95b tempest-ServerExternalEventsTest-203609284 tempest-ServerExternalEventsTest-203609284-project-member] [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 581.431572] env[63345]: DEBUG nova.policy [None req-8a6c5bc0-caa6-4dbe-a700-0852231dd95b tempest-ServerExternalEventsTest-203609284 tempest-ServerExternalEventsTest-203609284-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '41a31c48e1c74caca2453cc23fa75f14', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '913d98b54a4646b681201d14fad3c22e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 581.466657] env[63345]: INFO nova.scheduler.client.report [None req-8eb6a8e8-18a6-4bca-bb00-3bbb78f015ed tempest-FloatingIPsAssociationTestJSON-967064990 tempest-FloatingIPsAssociationTestJSON-967064990-project-member] Deleted allocations for instance 64c4c933-2b89-409a-9b4c-eccc7f481b67 [ 581.491410] env[63345]: DEBUG oslo_vmware.api [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Task: {'id': task-1016669, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.058446} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 581.491410] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] [instance: 1f595aef-799f-4ca4-be91-e95ef056926c] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 581.491673] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-091742ec-ac7e-49a0-965b-d35cf4cd8c51 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.515703] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] [instance: 1f595aef-799f-4ca4-be91-e95ef056926c] Reconfiguring VM instance instance-00000010 to attach disk [datastore2] 1f595aef-799f-4ca4-be91-e95ef056926c/1f595aef-799f-4ca4-be91-e95ef056926c.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 581.521428] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5b177044-3dfc-478d-bfe9-44986d72e856 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.543293] env[63345]: DEBUG oslo_vmware.api [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Waiting for the task: (returnval){ [ 581.543293] env[63345]: value = "task-1016670" [ 581.543293] env[63345]: _type = "Task" [ 581.543293] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 581.557434] env[63345]: DEBUG oslo_vmware.api [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Task: {'id': task-1016670, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 581.843250] env[63345]: DEBUG nova.compute.manager [None req-8a6c5bc0-caa6-4dbe-a700-0852231dd95b tempest-ServerExternalEventsTest-203609284 tempest-ServerExternalEventsTest-203609284-project-member] [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] Start building block device mappings for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 581.885466] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b322404-f9d3-4c9d-aede-8e7dd1919c6b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.894234] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd889c72-c767-4ea0-a902-d00db3c68296 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.924160] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42466751-b406-41d6-b386-c87483c96c2c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.937628] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd8e5c69-60f2-4884-b8e0-37596f47ea4c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.954545] env[63345]: DEBUG nova.compute.provider_tree [None req-fed8ccca-f158-4dc4-bc3d-351314b383ad tempest-VolumesAssistedSnapshotsTest-639887845 tempest-VolumesAssistedSnapshotsTest-639887845-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 581.985394] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8eb6a8e8-18a6-4bca-bb00-3bbb78f015ed tempest-FloatingIPsAssociationTestJSON-967064990 tempest-FloatingIPsAssociationTestJSON-967064990-project-member] Lock "64c4c933-2b89-409a-9b4c-eccc7f481b67" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 59.742s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 582.053650] env[63345]: DEBUG oslo_vmware.api [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Task: {'id': task-1016670, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 582.377473] env[63345]: DEBUG nova.network.neutron [None req-8a6c5bc0-caa6-4dbe-a700-0852231dd95b tempest-ServerExternalEventsTest-203609284 tempest-ServerExternalEventsTest-203609284-project-member] [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] Successfully created port: 7c71cffb-6a8f-43d7-b6d9-bd7e80e2973b {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 582.479386] env[63345]: ERROR nova.scheduler.client.report [None req-fed8ccca-f158-4dc4-bc3d-351314b383ad tempest-VolumesAssistedSnapshotsTest-639887845 tempest-VolumesAssistedSnapshotsTest-639887845-project-member] [req-0f8b7530-3860-413e-a6cb-ef4a17068366] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID fc35ddde-c15e-4ab8-bf77-a06ae0805b57. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-0f8b7530-3860-413e-a6cb-ef4a17068366"}]}: nova.exception.PortBindingFailed: Binding failed for port 3fbb95f1-c342-46f0-9f12-4c6c1d67942d, please check neutron logs for more information. [ 582.489554] env[63345]: DEBUG nova.compute.manager [None req-85aab4a0-1d2c-4cb7-b057-977c0a911cc2 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] Starting instance... 
{{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 582.498722] env[63345]: DEBUG nova.scheduler.client.report [None req-fed8ccca-f158-4dc4-bc3d-351314b383ad tempest-VolumesAssistedSnapshotsTest-639887845 tempest-VolumesAssistedSnapshotsTest-639887845-project-member] Refreshing inventories for resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 582.519415] env[63345]: DEBUG nova.scheduler.client.report [None req-fed8ccca-f158-4dc4-bc3d-351314b383ad tempest-VolumesAssistedSnapshotsTest-639887845 tempest-VolumesAssistedSnapshotsTest-639887845-project-member] Updating ProviderTree inventory for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 188, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 582.519636] env[63345]: DEBUG nova.compute.provider_tree [None req-fed8ccca-f158-4dc4-bc3d-351314b383ad tempest-VolumesAssistedSnapshotsTest-639887845 tempest-VolumesAssistedSnapshotsTest-639887845-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 188, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 582.538658] env[63345]: DEBUG nova.scheduler.client.report [None req-fed8ccca-f158-4dc4-bc3d-351314b383ad tempest-VolumesAssistedSnapshotsTest-639887845 tempest-VolumesAssistedSnapshotsTest-639887845-project-member] Refreshing aggregate associations for resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57, aggregates: None {{(pid=63345) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 582.560322] env[63345]: DEBUG oslo_vmware.api [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Task: {'id': task-1016670, 'name': ReconfigVM_Task, 'duration_secs': 0.740666} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 582.560980] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] [instance: 1f595aef-799f-4ca4-be91-e95ef056926c] Reconfigured VM instance instance-00000010 to attach disk [datastore2] 1f595aef-799f-4ca4-be91-e95ef056926c/1f595aef-799f-4ca4-be91-e95ef056926c.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 582.564635] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2e5b011f-4675-435a-bc94-8e001ea96499 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.570691] env[63345]: DEBUG oslo_vmware.api [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Waiting for the task: (returnval){ [ 582.570691] env[63345]: value = "task-1016671" [ 582.570691] env[63345]: _type = "Task" [ 582.570691] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 582.574774] env[63345]: DEBUG nova.scheduler.client.report [None req-fed8ccca-f158-4dc4-bc3d-351314b383ad tempest-VolumesAssistedSnapshotsTest-639887845 tempest-VolumesAssistedSnapshotsTest-639887845-project-member] Refreshing trait associations for resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=63345) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 582.583800] env[63345]: DEBUG oslo_vmware.api [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Task: {'id': task-1016671, 'name': Rename_Task} progress is 10%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 582.862056] env[63345]: DEBUG nova.compute.manager [None req-8a6c5bc0-caa6-4dbe-a700-0852231dd95b tempest-ServerExternalEventsTest-203609284 tempest-ServerExternalEventsTest-203609284-project-member] [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] Start spawning the instance on the hypervisor. 
{{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 582.902475] env[63345]: DEBUG nova.virt.hardware [None req-8a6c5bc0-caa6-4dbe-a700-0852231dd95b tempest-ServerExternalEventsTest-203609284 tempest-ServerExternalEventsTest-203609284-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 582.902706] env[63345]: DEBUG nova.virt.hardware [None req-8a6c5bc0-caa6-4dbe-a700-0852231dd95b tempest-ServerExternalEventsTest-203609284 tempest-ServerExternalEventsTest-203609284-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 582.902858] env[63345]: DEBUG nova.virt.hardware [None req-8a6c5bc0-caa6-4dbe-a700-0852231dd95b tempest-ServerExternalEventsTest-203609284 tempest-ServerExternalEventsTest-203609284-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 582.903044] env[63345]: DEBUG nova.virt.hardware [None req-8a6c5bc0-caa6-4dbe-a700-0852231dd95b tempest-ServerExternalEventsTest-203609284 tempest-ServerExternalEventsTest-203609284-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 582.903191] env[63345]: DEBUG nova.virt.hardware [None req-8a6c5bc0-caa6-4dbe-a700-0852231dd95b tempest-ServerExternalEventsTest-203609284 tempest-ServerExternalEventsTest-203609284-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 582.903332] env[63345]: DEBUG nova.virt.hardware [None req-8a6c5bc0-caa6-4dbe-a700-0852231dd95b tempest-ServerExternalEventsTest-203609284 tempest-ServerExternalEventsTest-203609284-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 582.903746] env[63345]: DEBUG nova.virt.hardware [None req-8a6c5bc0-caa6-4dbe-a700-0852231dd95b tempest-ServerExternalEventsTest-203609284 tempest-ServerExternalEventsTest-203609284-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 582.903746] env[63345]: DEBUG nova.virt.hardware [None req-8a6c5bc0-caa6-4dbe-a700-0852231dd95b tempest-ServerExternalEventsTest-203609284 tempest-ServerExternalEventsTest-203609284-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 582.903866] env[63345]: DEBUG 
nova.virt.hardware [None req-8a6c5bc0-caa6-4dbe-a700-0852231dd95b tempest-ServerExternalEventsTest-203609284 tempest-ServerExternalEventsTest-203609284-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 582.904014] env[63345]: DEBUG nova.virt.hardware [None req-8a6c5bc0-caa6-4dbe-a700-0852231dd95b tempest-ServerExternalEventsTest-203609284 tempest-ServerExternalEventsTest-203609284-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 582.904157] env[63345]: DEBUG nova.virt.hardware [None req-8a6c5bc0-caa6-4dbe-a700-0852231dd95b tempest-ServerExternalEventsTest-203609284 tempest-ServerExternalEventsTest-203609284-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 582.905304] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3191445-32d4-49c5-9e69-6eac7a6ad754 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.913761] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e7a18a3-c749-4bf4-9503-e953f7e83f69 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.017878] env[63345]: DEBUG oslo_concurrency.lockutils [None req-85aab4a0-1d2c-4cb7-b057-977c0a911cc2 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 583.082656] env[63345]: DEBUG oslo_vmware.api [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Task: {'id': task-1016671, 'name': Rename_Task, 'duration_secs': 0.14356} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 583.083504] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] [instance: 1f595aef-799f-4ca4-be91-e95ef056926c] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 583.083796] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c3e3ad4e-47c0-412d-9543-3b160c964de8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.091410] env[63345]: DEBUG oslo_vmware.api [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Waiting for the task: (returnval){ [ 583.091410] env[63345]: value = "task-1016672" [ 583.091410] env[63345]: _type = "Task" [ 583.091410] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 583.100324] env[63345]: DEBUG oslo_vmware.api [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Task: {'id': task-1016672, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 583.142984] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20d26547-4341-41b5-8ddd-a201a57d1fdb {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.150958] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77c975b1-c907-41db-b918-08fd14917231 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.184682] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87d73e0f-06fc-4aec-8e53-cfefce80710f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.192595] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-315d9710-49dc-46e8-8b35-583cc882b094 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.205932] env[63345]: DEBUG nova.compute.provider_tree [None req-fed8ccca-f158-4dc4-bc3d-351314b383ad tempest-VolumesAssistedSnapshotsTest-639887845 tempest-VolumesAssistedSnapshotsTest-639887845-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 583.602917] env[63345]: DEBUG oslo_vmware.api [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Task: {'id': task-1016672, 'name': PowerOnVM_Task, 'duration_secs': 0.451623} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 583.603558] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] [instance: 1f595aef-799f-4ca4-be91-e95ef056926c] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 583.603907] env[63345]: INFO nova.compute.manager [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] [instance: 1f595aef-799f-4ca4-be91-e95ef056926c] Took 5.38 seconds to spawn the instance on the hypervisor. 
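The entries above show the vmwareapi driver submitting PowerOnVM_Task as task-1016672 and then polling it (progress 0% until it completes with duration_secs 0.451623) before reporting "Powered on the VM". As a rough illustration of that poll-until-done pattern, here is a minimal, self-contained Python sketch; the get_task_info callable, the state names and wait_for_power_on_task itself are assumptions made for the example, not oslo.vmware's actual API:

import time

# Illustrative task states, loosely mirroring the running/success/error lifecycle
# that the PowerOnVM_Task entries above go through. Not oslo.vmware's real constants.
RUNNING, SUCCESS, ERROR = "running", "success", "error"

def wait_for_power_on_task(get_task_info, task_id, poll_interval=0.5, timeout=300):
    """Poll a task until it finishes, in the spirit of the Task: {'id': ..., 'name': ...} entries above.

    get_task_info is an assumed callable returning a dict with at least a
    'state' key; it stands in for whatever property read the real driver does.
    """
    deadline = time.monotonic() + timeout
    while True:
        info = get_task_info(task_id)
        if info["state"] == SUCCESS:
            return info
        if info["state"] == ERROR:
            raise RuntimeError(f"task {task_id} failed: {info.get('error')}")
        if time.monotonic() > deadline:
            raise TimeoutError(f"task {task_id} did not complete within {timeout}s")
        # Progress report comparable to the "progress is 0%." line in the log.
        print(f"Task {task_id} ({info.get('name')}) progress is {info.get('progress', 0)}%.")
        time.sleep(poll_interval)

# Stub run: the task reports RUNNING once, then SUCCESS.
states = iter([
    {"state": RUNNING, "progress": 0, "name": "PowerOnVM_Task"},
    {"state": SUCCESS, "progress": 100, "name": "PowerOnVM_Task", "duration_secs": 0.45},
])
print(wait_for_power_on_task(lambda task_id: next(states), "task-1016672"))

The real session layer also handles retries and session re-login; this sketch only shows the polling loop that the wait_for_task/_poll_task log lines correspond to.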
[ 583.604211] env[63345]: DEBUG nova.compute.manager [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] [instance: 1f595aef-799f-4ca4-be91-e95ef056926c] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 583.605521] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7faa917-5792-4080-b17a-a254331cb3b7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.751224] env[63345]: DEBUG nova.scheduler.client.report [None req-fed8ccca-f158-4dc4-bc3d-351314b383ad tempest-VolumesAssistedSnapshotsTest-639887845 tempest-VolumesAssistedSnapshotsTest-639887845-project-member] Updated inventory for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with generation 48 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 583.751531] env[63345]: DEBUG nova.compute.provider_tree [None req-fed8ccca-f158-4dc4-bc3d-351314b383ad tempest-VolumesAssistedSnapshotsTest-639887845 tempest-VolumesAssistedSnapshotsTest-639887845-project-member] Updating resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 generation from 48 to 49 during operation: update_inventory {{(pid=63345) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 583.751666] env[63345]: DEBUG nova.compute.provider_tree [None req-fed8ccca-f158-4dc4-bc3d-351314b383ad tempest-VolumesAssistedSnapshotsTest-639887845 tempest-VolumesAssistedSnapshotsTest-639887845-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 584.124029] env[63345]: INFO nova.compute.manager [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] [instance: 1f595aef-799f-4ca4-be91-e95ef056926c] Took 37.10 seconds to build instance. 
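Interleaved with the power-on, the resource tracker pushes inventory for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 and Placement answers by bumping the provider generation from 48 to 49; later entries ("Inventory has not changed") skip the write entirely. The following is a minimal sketch of that generation-checked, optimistic-concurrency update pattern; FakePlacement, LocalProviderTree and their methods are illustrative stand-ins, not Nova's or Placement's real classes:

# Generation-checked inventory update, in the spirit of the provider_tree /
# report-client entries above. All names here are illustrative.
class ConflictError(Exception):
    pass

class FakePlacement:
    """Stands in for the Placement API: rejects writes made with a stale generation."""
    def __init__(self, generation=48):
        self.generation = generation
        self.inventory = {}

    def set_inventory(self, inventory, generation):
        if generation != self.generation:
            raise ConflictError("provider generation is out of date")
        self.inventory = dict(inventory)
        self.generation += 1            # 48 -> 49, as in the log
        return self.generation

class LocalProviderTree:
    """Local cache of what was last written, plus the generation it was written at."""
    def __init__(self, generation):
        self.generation = generation
        self.inventory = {}

    def update_inventory(self, placement, inventory):
        if inventory == self.inventory:
            # Matches the later "Inventory has not changed" entries: no write needed.
            return False
        self.generation = placement.set_inventory(inventory, self.generation)
        self.inventory = dict(inventory)
        return True

placement = FakePlacement(generation=48)
tree = LocalProviderTree(generation=48)
inv = {"VCPU": {"total": 48, "allocation_ratio": 4.0},
       "MEMORY_MB": {"total": 196590, "reserved": 512},
       "DISK_GB": {"total": 400}}
print(tree.update_inventory(placement, inv), tree.generation)   # True 49
print(tree.update_inventory(placement, inv), tree.generation)   # False 49

A conflict (another writer bumped the generation first) would surface as ConflictError here; the real client refreshes its view of the provider and retries, which is what keeps the generation numbers in the log monotonically increasing.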
[ 584.259518] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fed8ccca-f158-4dc4-bc3d-351314b383ad tempest-VolumesAssistedSnapshotsTest-639887845 tempest-VolumesAssistedSnapshotsTest-639887845-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 3.432s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 584.260729] env[63345]: ERROR nova.compute.manager [None req-fed8ccca-f158-4dc4-bc3d-351314b383ad tempest-VolumesAssistedSnapshotsTest-639887845 tempest-VolumesAssistedSnapshotsTest-639887845-project-member] [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 3fbb95f1-c342-46f0-9f12-4c6c1d67942d, please check neutron logs for more information. [ 584.260729] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] Traceback (most recent call last): [ 584.260729] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 584.260729] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] self.driver.spawn(context, instance, image_meta, [ 584.260729] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 584.260729] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] self._vmops.spawn(context, instance, image_meta, injected_files, [ 584.260729] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 584.260729] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] vm_ref = self.build_virtual_machine(instance, [ 584.260729] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 584.260729] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] vif_infos = vmwarevif.get_vif_info(self._session, [ 584.260729] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 584.261401] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] for vif in network_info: [ 584.261401] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 584.261401] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] return self._sync_wrapper(fn, *args, **kwargs) [ 584.261401] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 584.261401] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] self.wait() [ 584.261401] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 584.261401] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] self[:] 
= self._gt.wait() [ 584.261401] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 584.261401] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] return self._exit_event.wait() [ 584.261401] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 584.261401] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] result = hub.switch() [ 584.261401] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 584.261401] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] return self.greenlet.switch() [ 584.261989] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 584.261989] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] result = function(*args, **kwargs) [ 584.261989] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 584.261989] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] return func(*args, **kwargs) [ 584.261989] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 584.261989] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] raise e [ 584.261989] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 584.261989] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] nwinfo = self.network_api.allocate_for_instance( [ 584.261989] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 584.261989] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] created_port_ids = self._update_ports_for_instance( [ 584.261989] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 584.261989] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] with excutils.save_and_reraise_exception(): [ 584.261989] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 584.262453] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] self.force_reraise() [ 584.262453] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 584.262453] env[63345]: ERROR nova.compute.manager 
[instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] raise self.value [ 584.262453] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 584.262453] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] updated_port = self._update_port( [ 584.262453] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 584.262453] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] _ensure_no_port_binding_failure(port) [ 584.262453] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 584.262453] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] raise exception.PortBindingFailed(port_id=port['id']) [ 584.262453] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] nova.exception.PortBindingFailed: Binding failed for port 3fbb95f1-c342-46f0-9f12-4c6c1d67942d, please check neutron logs for more information. [ 584.262453] env[63345]: ERROR nova.compute.manager [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] [ 584.262810] env[63345]: DEBUG nova.compute.utils [None req-fed8ccca-f158-4dc4-bc3d-351314b383ad tempest-VolumesAssistedSnapshotsTest-639887845 tempest-VolumesAssistedSnapshotsTest-639887845-project-member] [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] Binding failed for port 3fbb95f1-c342-46f0-9f12-4c6c1d67942d, please check neutron logs for more information. {{(pid=63345) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 584.263688] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d45e143f-5f98-4d14-bf6e-59c328ff4693 tempest-ServersWithSpecificFlavorTestJSON-889674138 tempest-ServersWithSpecificFlavorTestJSON-889674138-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 27.999s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 584.268994] env[63345]: DEBUG nova.compute.manager [None req-fed8ccca-f158-4dc4-bc3d-351314b383ad tempest-VolumesAssistedSnapshotsTest-639887845 tempest-VolumesAssistedSnapshotsTest-639887845-project-member] [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] Build of instance e525b0c2-55f9-43f2-9d4f-faf46c0cd559 was re-scheduled: Binding failed for port 3fbb95f1-c342-46f0-9f12-4c6c1d67942d, please check neutron logs for more information. 
{{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 584.269256] env[63345]: DEBUG nova.compute.manager [None req-fed8ccca-f158-4dc4-bc3d-351314b383ad tempest-VolumesAssistedSnapshotsTest-639887845 tempest-VolumesAssistedSnapshotsTest-639887845-project-member] [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] Unplugging VIFs for instance {{(pid=63345) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 584.269619] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fed8ccca-f158-4dc4-bc3d-351314b383ad tempest-VolumesAssistedSnapshotsTest-639887845 tempest-VolumesAssistedSnapshotsTest-639887845-project-member] Acquiring lock "refresh_cache-e525b0c2-55f9-43f2-9d4f-faf46c0cd559" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 584.269705] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fed8ccca-f158-4dc4-bc3d-351314b383ad tempest-VolumesAssistedSnapshotsTest-639887845 tempest-VolumesAssistedSnapshotsTest-639887845-project-member] Acquired lock "refresh_cache-e525b0c2-55f9-43f2-9d4f-faf46c0cd559" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 584.269905] env[63345]: DEBUG nova.network.neutron [None req-fed8ccca-f158-4dc4-bc3d-351314b383ad tempest-VolumesAssistedSnapshotsTest-639887845 tempest-VolumesAssistedSnapshotsTest-639887845-project-member] [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 584.385783] env[63345]: ERROR nova.compute.manager [None req-8a6c5bc0-caa6-4dbe-a700-0852231dd95b tempest-ServerExternalEventsTest-203609284 tempest-ServerExternalEventsTest-203609284-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 7c71cffb-6a8f-43d7-b6d9-bd7e80e2973b, please check neutron logs for more information. 
[ 584.385783] env[63345]: ERROR nova.compute.manager Traceback (most recent call last): [ 584.385783] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 584.385783] env[63345]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 584.385783] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 584.385783] env[63345]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 584.385783] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 584.385783] env[63345]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 584.385783] env[63345]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 584.385783] env[63345]: ERROR nova.compute.manager self.force_reraise() [ 584.385783] env[63345]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 584.385783] env[63345]: ERROR nova.compute.manager raise self.value [ 584.385783] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 584.385783] env[63345]: ERROR nova.compute.manager updated_port = self._update_port( [ 584.385783] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 584.385783] env[63345]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 584.386356] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 584.386356] env[63345]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 584.386356] env[63345]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 7c71cffb-6a8f-43d7-b6d9-bd7e80e2973b, please check neutron logs for more information. 
[ 584.386356] env[63345]: ERROR nova.compute.manager [ 584.386356] env[63345]: Traceback (most recent call last): [ 584.386356] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 584.386356] env[63345]: listener.cb(fileno) [ 584.386356] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 584.386356] env[63345]: result = function(*args, **kwargs) [ 584.386356] env[63345]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 584.386356] env[63345]: return func(*args, **kwargs) [ 584.386356] env[63345]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 584.386356] env[63345]: raise e [ 584.386356] env[63345]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 584.386356] env[63345]: nwinfo = self.network_api.allocate_for_instance( [ 584.386356] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 584.386356] env[63345]: created_port_ids = self._update_ports_for_instance( [ 584.386356] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 584.386356] env[63345]: with excutils.save_and_reraise_exception(): [ 584.386356] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 584.386356] env[63345]: self.force_reraise() [ 584.386356] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 584.386356] env[63345]: raise self.value [ 584.386356] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 584.386356] env[63345]: updated_port = self._update_port( [ 584.386356] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 584.386356] env[63345]: _ensure_no_port_binding_failure(port) [ 584.386356] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 584.386356] env[63345]: raise exception.PortBindingFailed(port_id=port['id']) [ 584.387315] env[63345]: nova.exception.PortBindingFailed: Binding failed for port 7c71cffb-6a8f-43d7-b6d9-bd7e80e2973b, please check neutron logs for more information. [ 584.387315] env[63345]: Removing descriptor: 15 [ 584.387315] env[63345]: ERROR nova.compute.manager [None req-8a6c5bc0-caa6-4dbe-a700-0852231dd95b tempest-ServerExternalEventsTest-203609284 tempest-ServerExternalEventsTest-203609284-project-member] [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 7c71cffb-6a8f-43d7-b6d9-bd7e80e2973b, please check neutron logs for more information. 
[ 584.387315] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] Traceback (most recent call last): [ 584.387315] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 584.387315] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] yield resources [ 584.387315] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 584.387315] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] self.driver.spawn(context, instance, image_meta, [ 584.387315] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 584.387315] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] self._vmops.spawn(context, instance, image_meta, injected_files, [ 584.387315] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 584.387315] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] vm_ref = self.build_virtual_machine(instance, [ 584.387743] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 584.387743] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] vif_infos = vmwarevif.get_vif_info(self._session, [ 584.387743] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 584.387743] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] for vif in network_info: [ 584.387743] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 584.387743] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] return self._sync_wrapper(fn, *args, **kwargs) [ 584.387743] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 584.387743] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] self.wait() [ 584.387743] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 584.387743] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] self[:] = self._gt.wait() [ 584.387743] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 584.387743] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] return self._exit_event.wait() [ 584.387743] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 584.388159] env[63345]: ERROR 
nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] result = hub.switch() [ 584.388159] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 584.388159] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] return self.greenlet.switch() [ 584.388159] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 584.388159] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] result = function(*args, **kwargs) [ 584.388159] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 584.388159] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] return func(*args, **kwargs) [ 584.388159] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 584.388159] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] raise e [ 584.388159] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 584.388159] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] nwinfo = self.network_api.allocate_for_instance( [ 584.388159] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 584.388159] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] created_port_ids = self._update_ports_for_instance( [ 584.388807] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 584.388807] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] with excutils.save_and_reraise_exception(): [ 584.388807] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 584.388807] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] self.force_reraise() [ 584.388807] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 584.388807] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] raise self.value [ 584.388807] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 584.388807] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] updated_port = self._update_port( [ 584.388807] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 584.388807] 
env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] _ensure_no_port_binding_failure(port) [ 584.388807] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 584.388807] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] raise exception.PortBindingFailed(port_id=port['id']) [ 584.389332] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] nova.exception.PortBindingFailed: Binding failed for port 7c71cffb-6a8f-43d7-b6d9-bd7e80e2973b, please check neutron logs for more information. [ 584.389332] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] [ 584.389332] env[63345]: INFO nova.compute.manager [None req-8a6c5bc0-caa6-4dbe-a700-0852231dd95b tempest-ServerExternalEventsTest-203609284 tempest-ServerExternalEventsTest-203609284-project-member] [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] Terminating instance [ 584.465230] env[63345]: DEBUG nova.compute.manager [req-f392ee45-06af-46ce-a771-be9034291d3e req-9e9b66c6-3ddd-4fc9-94fe-d015b941d81e service nova] [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] Received event network-changed-7c71cffb-6a8f-43d7-b6d9-bd7e80e2973b {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 584.465230] env[63345]: DEBUG nova.compute.manager [req-f392ee45-06af-46ce-a771-be9034291d3e req-9e9b66c6-3ddd-4fc9-94fe-d015b941d81e service nova] [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] Refreshing instance network info cache due to event network-changed-7c71cffb-6a8f-43d7-b6d9-bd7e80e2973b. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 584.465230] env[63345]: DEBUG oslo_concurrency.lockutils [req-f392ee45-06af-46ce-a771-be9034291d3e req-9e9b66c6-3ddd-4fc9-94fe-d015b941d81e service nova] Acquiring lock "refresh_cache-77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 584.465230] env[63345]: DEBUG oslo_concurrency.lockutils [req-f392ee45-06af-46ce-a771-be9034291d3e req-9e9b66c6-3ddd-4fc9-94fe-d015b941d81e service nova] Acquired lock "refresh_cache-77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 584.465230] env[63345]: DEBUG nova.network.neutron [req-f392ee45-06af-46ce-a771-be9034291d3e req-9e9b66c6-3ddd-4fc9-94fe-d015b941d81e service nova] [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] Refreshing network info cache for port 7c71cffb-6a8f-43d7-b6d9-bd7e80e2973b {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 584.568620] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Acquiring lock "30755716-03a7-41bd-90c2-7ef21baf9975" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 584.568620] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Lock 
"30755716-03a7-41bd-90c2-7ef21baf9975" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 584.627777] env[63345]: DEBUG oslo_concurrency.lockutils [None req-bf46a48d-d86b-4af9-8bd6-42d03b75690c tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Lock "1f595aef-799f-4ca4-be91-e95ef056926c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 50.259s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 584.802859] env[63345]: DEBUG nova.network.neutron [None req-fed8ccca-f158-4dc4-bc3d-351314b383ad tempest-VolumesAssistedSnapshotsTest-639887845 tempest-VolumesAssistedSnapshotsTest-639887845-project-member] [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 584.894408] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8a6c5bc0-caa6-4dbe-a700-0852231dd95b tempest-ServerExternalEventsTest-203609284 tempest-ServerExternalEventsTest-203609284-project-member] Acquiring lock "refresh_cache-77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 584.982956] env[63345]: DEBUG nova.network.neutron [None req-fed8ccca-f158-4dc4-bc3d-351314b383ad tempest-VolumesAssistedSnapshotsTest-639887845 tempest-VolumesAssistedSnapshotsTest-639887845-project-member] [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 585.129960] env[63345]: DEBUG nova.compute.manager [None req-f951fde0-e7d4-4228-ab48-05cbc0ea0cd4 tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 585.148679] env[63345]: DEBUG nova.network.neutron [req-f392ee45-06af-46ce-a771-be9034291d3e req-9e9b66c6-3ddd-4fc9-94fe-d015b941d81e service nova] [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 585.263090] env[63345]: DEBUG nova.compute.manager [None req-9cdb690d-e225-42fc-bb8d-780439b4014a tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] [instance: 1f595aef-799f-4ca4-be91-e95ef056926c] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 585.263090] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48a6b259-c5ee-41a2-a866-6d2dc1e57b3f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.270533] env[63345]: DEBUG nova.network.neutron [req-f392ee45-06af-46ce-a771-be9034291d3e req-9e9b66c6-3ddd-4fc9-94fe-d015b941d81e service nova] [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 585.311166] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cde49ea6-71f5-4da0-aedf-9ce4daaa3614 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.320370] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c7fa608-9282-48e4-9627-3143c5c53133 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.353266] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f2de540-cb17-4f6c-a5ab-b7a1c67fed8e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.362277] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54c0345b-99b2-424e-9418-7d5478521bd1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.376625] env[63345]: DEBUG nova.compute.provider_tree [None req-d45e143f-5f98-4d14-bf6e-59c328ff4693 tempest-ServersWithSpecificFlavorTestJSON-889674138 tempest-ServersWithSpecificFlavorTestJSON-889674138-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 585.487892] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fed8ccca-f158-4dc4-bc3d-351314b383ad tempest-VolumesAssistedSnapshotsTest-639887845 tempest-VolumesAssistedSnapshotsTest-639887845-project-member] Releasing lock "refresh_cache-e525b0c2-55f9-43f2-9d4f-faf46c0cd559" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 585.487892] env[63345]: DEBUG nova.compute.manager [None req-fed8ccca-f158-4dc4-bc3d-351314b383ad tempest-VolumesAssistedSnapshotsTest-639887845 tempest-VolumesAssistedSnapshotsTest-639887845-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=63345) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 585.487892] env[63345]: DEBUG nova.compute.manager [None req-fed8ccca-f158-4dc4-bc3d-351314b383ad tempest-VolumesAssistedSnapshotsTest-639887845 tempest-VolumesAssistedSnapshotsTest-639887845-project-member] [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 585.487892] env[63345]: DEBUG nova.network.neutron [None req-fed8ccca-f158-4dc4-bc3d-351314b383ad tempest-VolumesAssistedSnapshotsTest-639887845 tempest-VolumesAssistedSnapshotsTest-639887845-project-member] [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 585.509507] env[63345]: DEBUG nova.network.neutron [None req-fed8ccca-f158-4dc4-bc3d-351314b383ad tempest-VolumesAssistedSnapshotsTest-639887845 tempest-VolumesAssistedSnapshotsTest-639887845-project-member] [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 585.558259] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dae5937f-11fc-4e77-b96b-1179f0d567cf tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Acquiring lock "1f595aef-799f-4ca4-be91-e95ef056926c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 585.558522] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dae5937f-11fc-4e77-b96b-1179f0d567cf tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Lock "1f595aef-799f-4ca4-be91-e95ef056926c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 585.558726] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dae5937f-11fc-4e77-b96b-1179f0d567cf tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Acquiring lock "1f595aef-799f-4ca4-be91-e95ef056926c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 585.558960] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dae5937f-11fc-4e77-b96b-1179f0d567cf tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Lock "1f595aef-799f-4ca4-be91-e95ef056926c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 585.559202] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dae5937f-11fc-4e77-b96b-1179f0d567cf tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Lock "1f595aef-799f-4ca4-be91-e95ef056926c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 585.561717] env[63345]: INFO 
nova.compute.manager [None req-dae5937f-11fc-4e77-b96b-1179f0d567cf tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] [instance: 1f595aef-799f-4ca4-be91-e95ef056926c] Terminating instance [ 585.655355] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f951fde0-e7d4-4228-ab48-05cbc0ea0cd4 tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 585.774861] env[63345]: INFO nova.compute.manager [None req-9cdb690d-e225-42fc-bb8d-780439b4014a tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] [instance: 1f595aef-799f-4ca4-be91-e95ef056926c] instance snapshotting [ 585.774861] env[63345]: DEBUG nova.objects.instance [None req-9cdb690d-e225-42fc-bb8d-780439b4014a tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Lazy-loading 'flavor' on Instance uuid 1f595aef-799f-4ca4-be91-e95ef056926c {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 585.775845] env[63345]: DEBUG oslo_concurrency.lockutils [req-f392ee45-06af-46ce-a771-be9034291d3e req-9e9b66c6-3ddd-4fc9-94fe-d015b941d81e service nova] Releasing lock "refresh_cache-77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 585.776253] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8a6c5bc0-caa6-4dbe-a700-0852231dd95b tempest-ServerExternalEventsTest-203609284 tempest-ServerExternalEventsTest-203609284-project-member] Acquired lock "refresh_cache-77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 585.776497] env[63345]: DEBUG nova.network.neutron [None req-8a6c5bc0-caa6-4dbe-a700-0852231dd95b tempest-ServerExternalEventsTest-203609284 tempest-ServerExternalEventsTest-203609284-project-member] [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 585.879157] env[63345]: DEBUG nova.scheduler.client.report [None req-d45e143f-5f98-4d14-bf6e-59c328ff4693 tempest-ServersWithSpecificFlavorTestJSON-889674138 tempest-ServersWithSpecificFlavorTestJSON-889674138-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 586.013310] env[63345]: DEBUG nova.network.neutron [None req-fed8ccca-f158-4dc4-bc3d-351314b383ad tempest-VolumesAssistedSnapshotsTest-639887845 tempest-VolumesAssistedSnapshotsTest-639887845-project-member] [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 586.065503] env[63345]: DEBUG 
oslo_concurrency.lockutils [None req-dae5937f-11fc-4e77-b96b-1179f0d567cf tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Acquiring lock "refresh_cache-1f595aef-799f-4ca4-be91-e95ef056926c" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 586.065970] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dae5937f-11fc-4e77-b96b-1179f0d567cf tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Acquired lock "refresh_cache-1f595aef-799f-4ca4-be91-e95ef056926c" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 586.066264] env[63345]: DEBUG nova.network.neutron [None req-dae5937f-11fc-4e77-b96b-1179f0d567cf tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] [instance: 1f595aef-799f-4ca4-be91-e95ef056926c] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 586.281249] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38966a22-6704-4d86-a9bd-9fcd1e9635db {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.299171] env[63345]: DEBUG nova.network.neutron [None req-8a6c5bc0-caa6-4dbe-a700-0852231dd95b tempest-ServerExternalEventsTest-203609284 tempest-ServerExternalEventsTest-203609284-project-member] [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 586.301187] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a95a7694-5ff4-48eb-b460-d79cf2463903 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.366238] env[63345]: DEBUG nova.network.neutron [None req-8a6c5bc0-caa6-4dbe-a700-0852231dd95b tempest-ServerExternalEventsTest-203609284 tempest-ServerExternalEventsTest-203609284-project-member] [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 586.383565] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d45e143f-5f98-4d14-bf6e-59c328ff4693 tempest-ServersWithSpecificFlavorTestJSON-889674138 tempest-ServersWithSpecificFlavorTestJSON-889674138-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.120s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 586.384225] env[63345]: ERROR nova.compute.manager [None req-d45e143f-5f98-4d14-bf6e-59c328ff4693 tempest-ServersWithSpecificFlavorTestJSON-889674138 tempest-ServersWithSpecificFlavorTestJSON-889674138-project-member] [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 29fbfacc-22e8-4e75-8ec6-1ffc12ff0bcc, please check neutron logs for more information. 
[ 586.384225] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] Traceback (most recent call last): [ 586.384225] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 586.384225] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] self.driver.spawn(context, instance, image_meta, [ 586.384225] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 586.384225] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 586.384225] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 586.384225] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] vm_ref = self.build_virtual_machine(instance, [ 586.384225] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 586.384225] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] vif_infos = vmwarevif.get_vif_info(self._session, [ 586.384225] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 586.384653] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] for vif in network_info: [ 586.384653] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 586.384653] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] return self._sync_wrapper(fn, *args, **kwargs) [ 586.384653] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 586.384653] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] self.wait() [ 586.384653] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 586.384653] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] self[:] = self._gt.wait() [ 586.384653] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 586.384653] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] return self._exit_event.wait() [ 586.384653] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 586.384653] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] result = hub.switch() [ 586.384653] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
586.384653] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] return self.greenlet.switch() [ 586.385231] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 586.385231] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] result = function(*args, **kwargs) [ 586.385231] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 586.385231] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] return func(*args, **kwargs) [ 586.385231] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 586.385231] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] raise e [ 586.385231] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 586.385231] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] nwinfo = self.network_api.allocate_for_instance( [ 586.385231] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 586.385231] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] created_port_ids = self._update_ports_for_instance( [ 586.385231] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 586.385231] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] with excutils.save_and_reraise_exception(): [ 586.385231] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 586.385652] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] self.force_reraise() [ 586.385652] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 586.385652] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] raise self.value [ 586.385652] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 586.385652] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] updated_port = self._update_port( [ 586.385652] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 586.385652] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] _ensure_no_port_binding_failure(port) [ 586.385652] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 586.385652] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] raise exception.PortBindingFailed(port_id=port['id']) [ 586.385652] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] nova.exception.PortBindingFailed: Binding failed for port 29fbfacc-22e8-4e75-8ec6-1ffc12ff0bcc, please check neutron logs for more information. [ 586.385652] env[63345]: ERROR nova.compute.manager [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] [ 586.385979] env[63345]: DEBUG nova.compute.utils [None req-d45e143f-5f98-4d14-bf6e-59c328ff4693 tempest-ServersWithSpecificFlavorTestJSON-889674138 tempest-ServersWithSpecificFlavorTestJSON-889674138-project-member] [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] Binding failed for port 29fbfacc-22e8-4e75-8ec6-1ffc12ff0bcc, please check neutron logs for more information. {{(pid=63345) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 586.386081] env[63345]: DEBUG oslo_concurrency.lockutils [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 29.445s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 586.388961] env[63345]: DEBUG nova.compute.manager [None req-d45e143f-5f98-4d14-bf6e-59c328ff4693 tempest-ServersWithSpecificFlavorTestJSON-889674138 tempest-ServersWithSpecificFlavorTestJSON-889674138-project-member] [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] Build of instance d467124f-0b2b-4108-90d1-40f149e55ff0 was re-scheduled: Binding failed for port 29fbfacc-22e8-4e75-8ec6-1ffc12ff0bcc, please check neutron logs for more information. 
{{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 586.389436] env[63345]: DEBUG nova.compute.manager [None req-d45e143f-5f98-4d14-bf6e-59c328ff4693 tempest-ServersWithSpecificFlavorTestJSON-889674138 tempest-ServersWithSpecificFlavorTestJSON-889674138-project-member] [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] Unplugging VIFs for instance {{(pid=63345) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 586.389662] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d45e143f-5f98-4d14-bf6e-59c328ff4693 tempest-ServersWithSpecificFlavorTestJSON-889674138 tempest-ServersWithSpecificFlavorTestJSON-889674138-project-member] Acquiring lock "refresh_cache-d467124f-0b2b-4108-90d1-40f149e55ff0" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 586.389827] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d45e143f-5f98-4d14-bf6e-59c328ff4693 tempest-ServersWithSpecificFlavorTestJSON-889674138 tempest-ServersWithSpecificFlavorTestJSON-889674138-project-member] Acquired lock "refresh_cache-d467124f-0b2b-4108-90d1-40f149e55ff0" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 586.389996] env[63345]: DEBUG nova.network.neutron [None req-d45e143f-5f98-4d14-bf6e-59c328ff4693 tempest-ServersWithSpecificFlavorTestJSON-889674138 tempest-ServersWithSpecificFlavorTestJSON-889674138-project-member] [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 586.515144] env[63345]: INFO nova.compute.manager [None req-fed8ccca-f158-4dc4-bc3d-351314b383ad tempest-VolumesAssistedSnapshotsTest-639887845 tempest-VolumesAssistedSnapshotsTest-639887845-project-member] [instance: e525b0c2-55f9-43f2-9d4f-faf46c0cd559] Took 1.03 seconds to deallocate network for instance. [ 586.602155] env[63345]: DEBUG nova.network.neutron [None req-dae5937f-11fc-4e77-b96b-1179f0d567cf tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] [instance: 1f595aef-799f-4ca4-be91-e95ef056926c] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 586.658901] env[63345]: DEBUG oslo_concurrency.lockutils [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquiring lock "869f8110-6490-4a47-955a-0ce085f826af" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 586.659203] env[63345]: DEBUG oslo_concurrency.lockutils [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lock "869f8110-6490-4a47-955a-0ce085f826af" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 586.689328] env[63345]: DEBUG nova.network.neutron [None req-dae5937f-11fc-4e77-b96b-1179f0d567cf tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] [instance: 1f595aef-799f-4ca4-be91-e95ef056926c] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 586.701592] env[63345]: DEBUG nova.compute.manager [req-d133f64e-6f27-4d02-9a6b-a3708e63320c req-1533d26d-74c1-4de3-a760-f93ce89d742d service nova] [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] Received event network-vif-deleted-7c71cffb-6a8f-43d7-b6d9-bd7e80e2973b {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 586.813678] env[63345]: DEBUG nova.compute.manager [None req-9cdb690d-e225-42fc-bb8d-780439b4014a tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] [instance: 1f595aef-799f-4ca4-be91-e95ef056926c] Instance disappeared during snapshot {{(pid=63345) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4580}} [ 586.868796] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8a6c5bc0-caa6-4dbe-a700-0852231dd95b tempest-ServerExternalEventsTest-203609284 tempest-ServerExternalEventsTest-203609284-project-member] Releasing lock "refresh_cache-77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 586.869601] env[63345]: DEBUG nova.compute.manager [None req-8a6c5bc0-caa6-4dbe-a700-0852231dd95b tempest-ServerExternalEventsTest-203609284 tempest-ServerExternalEventsTest-203609284-project-member] [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] Start destroying the instance on the hypervisor. 
{{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 586.869808] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-8a6c5bc0-caa6-4dbe-a700-0852231dd95b tempest-ServerExternalEventsTest-203609284 tempest-ServerExternalEventsTest-203609284-project-member] [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 586.870158] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-73d37288-1263-4097-86ae-41c24e49c607 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.880697] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-920fceec-fbca-4654-afc4-31e95d990f37 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.910781] env[63345]: WARNING nova.virt.vmwareapi.vmops [None req-8a6c5bc0-caa6-4dbe-a700-0852231dd95b tempest-ServerExternalEventsTest-203609284 tempest-ServerExternalEventsTest-203609284-project-member] [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54 could not be found. [ 586.911160] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-8a6c5bc0-caa6-4dbe-a700-0852231dd95b tempest-ServerExternalEventsTest-203609284 tempest-ServerExternalEventsTest-203609284-project-member] [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 586.911399] env[63345]: INFO nova.compute.manager [None req-8a6c5bc0-caa6-4dbe-a700-0852231dd95b tempest-ServerExternalEventsTest-203609284 tempest-ServerExternalEventsTest-203609284-project-member] [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] Took 0.04 seconds to destroy the instance on the hypervisor. [ 586.911701] env[63345]: DEBUG oslo.service.loopingcall [None req-8a6c5bc0-caa6-4dbe-a700-0852231dd95b tempest-ServerExternalEventsTest-203609284 tempest-ServerExternalEventsTest-203609284-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 586.911968] env[63345]: DEBUG nova.compute.manager [-] [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 586.912191] env[63345]: DEBUG nova.network.neutron [-] [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 586.916627] env[63345]: DEBUG nova.network.neutron [None req-d45e143f-5f98-4d14-bf6e-59c328ff4693 tempest-ServersWithSpecificFlavorTestJSON-889674138 tempest-ServersWithSpecificFlavorTestJSON-889674138-project-member] [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 586.942126] env[63345]: DEBUG nova.network.neutron [-] [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 586.983256] env[63345]: DEBUG nova.compute.manager [None req-9cdb690d-e225-42fc-bb8d-780439b4014a tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] [instance: 1f595aef-799f-4ca4-be91-e95ef056926c] Found 0 images (rotation: 2) {{(pid=63345) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4883}} [ 587.077576] env[63345]: DEBUG nova.network.neutron [None req-d45e143f-5f98-4d14-bf6e-59c328ff4693 tempest-ServersWithSpecificFlavorTestJSON-889674138 tempest-ServersWithSpecificFlavorTestJSON-889674138-project-member] [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 587.193746] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dae5937f-11fc-4e77-b96b-1179f0d567cf tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Releasing lock "refresh_cache-1f595aef-799f-4ca4-be91-e95ef056926c" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 587.194543] env[63345]: DEBUG nova.compute.manager [None req-dae5937f-11fc-4e77-b96b-1179f0d567cf tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] [instance: 1f595aef-799f-4ca4-be91-e95ef056926c] Start destroying the instance on the hypervisor. {{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 587.194931] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-dae5937f-11fc-4e77-b96b-1179f0d567cf tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] [instance: 1f595aef-799f-4ca4-be91-e95ef056926c] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 587.195989] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6e800f5-81e2-4ce1-8b14-51c644a2c39a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.205088] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-dae5937f-11fc-4e77-b96b-1179f0d567cf tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] [instance: 1f595aef-799f-4ca4-be91-e95ef056926c] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 587.206307] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-76e37a12-03d5-4584-9f04-dfb82d1d965f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.214474] env[63345]: DEBUG oslo_vmware.api [None req-dae5937f-11fc-4e77-b96b-1179f0d567cf tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Waiting for the task: (returnval){ [ 587.214474] env[63345]: value = "task-1016673" [ 587.214474] env[63345]: _type = "Task" [ 587.214474] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 587.227297] env[63345]: DEBUG oslo_vmware.api [None req-dae5937f-11fc-4e77-b96b-1179f0d567cf tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Task: {'id': task-1016673, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 587.385013] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f56d77cc-58d6-4dc7-b72d-8671320162a4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.394723] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc906014-ee2c-4dee-8513-9082a3363da2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.428902] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5b93e96-89d4-4465-9a07-40f6b71d459c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.442099] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fd98f8a-3c82-4136-a37c-0f5fb78780ce {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.443897] env[63345]: DEBUG nova.network.neutron [-] [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 587.456174] env[63345]: DEBUG nova.compute.provider_tree [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 587.555015] env[63345]: INFO nova.scheduler.client.report [None req-fed8ccca-f158-4dc4-bc3d-351314b383ad tempest-VolumesAssistedSnapshotsTest-639887845 tempest-VolumesAssistedSnapshotsTest-639887845-project-member] Deleted allocations for instance e525b0c2-55f9-43f2-9d4f-faf46c0cd559 [ 587.583035] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d45e143f-5f98-4d14-bf6e-59c328ff4693 tempest-ServersWithSpecificFlavorTestJSON-889674138 tempest-ServersWithSpecificFlavorTestJSON-889674138-project-member] Releasing lock "refresh_cache-d467124f-0b2b-4108-90d1-40f149e55ff0" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 587.583035] env[63345]: DEBUG nova.compute.manager [None req-d45e143f-5f98-4d14-bf6e-59c328ff4693 tempest-ServersWithSpecificFlavorTestJSON-889674138 tempest-ServersWithSpecificFlavorTestJSON-889674138-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=63345) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 587.583035] env[63345]: DEBUG nova.compute.manager [None req-d45e143f-5f98-4d14-bf6e-59c328ff4693 tempest-ServersWithSpecificFlavorTestJSON-889674138 tempest-ServersWithSpecificFlavorTestJSON-889674138-project-member] [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 587.583035] env[63345]: DEBUG nova.network.neutron [None req-d45e143f-5f98-4d14-bf6e-59c328ff4693 tempest-ServersWithSpecificFlavorTestJSON-889674138 tempest-ServersWithSpecificFlavorTestJSON-889674138-project-member] [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 587.607119] env[63345]: DEBUG nova.network.neutron [None req-d45e143f-5f98-4d14-bf6e-59c328ff4693 tempest-ServersWithSpecificFlavorTestJSON-889674138 tempest-ServersWithSpecificFlavorTestJSON-889674138-project-member] [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 587.724898] env[63345]: DEBUG oslo_vmware.api [None req-dae5937f-11fc-4e77-b96b-1179f0d567cf tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Task: {'id': task-1016673, 'name': PowerOffVM_Task, 'duration_secs': 0.131126} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 587.725200] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-dae5937f-11fc-4e77-b96b-1179f0d567cf tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] [instance: 1f595aef-799f-4ca4-be91-e95ef056926c] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 587.725348] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-dae5937f-11fc-4e77-b96b-1179f0d567cf tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] [instance: 1f595aef-799f-4ca4-be91-e95ef056926c] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 587.725585] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8be8c925-b75f-42b5-a41e-3f5aae0b542e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.752216] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-dae5937f-11fc-4e77-b96b-1179f0d567cf tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] [instance: 1f595aef-799f-4ca4-be91-e95ef056926c] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 587.752437] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-dae5937f-11fc-4e77-b96b-1179f0d567cf tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] [instance: 1f595aef-799f-4ca4-be91-e95ef056926c] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 587.752613] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-dae5937f-11fc-4e77-b96b-1179f0d567cf tempest-ServersAaction247Test-274552655 
tempest-ServersAaction247Test-274552655-project-member] Deleting the datastore file [datastore2] 1f595aef-799f-4ca4-be91-e95ef056926c {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 587.752858] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bebcf6d4-9e8b-48c5-924e-129624660646 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.759074] env[63345]: DEBUG oslo_vmware.api [None req-dae5937f-11fc-4e77-b96b-1179f0d567cf tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Waiting for the task: (returnval){ [ 587.759074] env[63345]: value = "task-1016675" [ 587.759074] env[63345]: _type = "Task" [ 587.759074] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 587.767593] env[63345]: DEBUG oslo_vmware.api [None req-dae5937f-11fc-4e77-b96b-1179f0d567cf tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Task: {'id': task-1016675, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 587.947347] env[63345]: INFO nova.compute.manager [-] [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] Took 1.04 seconds to deallocate network for instance. [ 587.949704] env[63345]: DEBUG nova.compute.claims [None req-8a6c5bc0-caa6-4dbe-a700-0852231dd95b tempest-ServerExternalEventsTest-203609284 tempest-ServerExternalEventsTest-203609284-project-member] [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] Aborting claim: {{(pid=63345) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 587.949904] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8a6c5bc0-caa6-4dbe-a700-0852231dd95b tempest-ServerExternalEventsTest-203609284 tempest-ServerExternalEventsTest-203609284-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 587.958940] env[63345]: DEBUG nova.scheduler.client.report [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 588.062242] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fed8ccca-f158-4dc4-bc3d-351314b383ad tempest-VolumesAssistedSnapshotsTest-639887845 tempest-VolumesAssistedSnapshotsTest-639887845-project-member] Lock "e525b0c2-55f9-43f2-9d4f-faf46c0cd559" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 62.239s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 588.109450] env[63345]: DEBUG nova.network.neutron [None 
req-d45e143f-5f98-4d14-bf6e-59c328ff4693 tempest-ServersWithSpecificFlavorTestJSON-889674138 tempest-ServersWithSpecificFlavorTestJSON-889674138-project-member] [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 588.269987] env[63345]: DEBUG oslo_vmware.api [None req-dae5937f-11fc-4e77-b96b-1179f0d567cf tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Task: {'id': task-1016675, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.098843} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 588.270285] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-dae5937f-11fc-4e77-b96b-1179f0d567cf tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 588.270486] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-dae5937f-11fc-4e77-b96b-1179f0d567cf tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] [instance: 1f595aef-799f-4ca4-be91-e95ef056926c] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 588.270665] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-dae5937f-11fc-4e77-b96b-1179f0d567cf tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] [instance: 1f595aef-799f-4ca4-be91-e95ef056926c] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 588.270835] env[63345]: INFO nova.compute.manager [None req-dae5937f-11fc-4e77-b96b-1179f0d567cf tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] [instance: 1f595aef-799f-4ca4-be91-e95ef056926c] Took 1.08 seconds to destroy the instance on the hypervisor. [ 588.271083] env[63345]: DEBUG oslo.service.loopingcall [None req-dae5937f-11fc-4e77-b96b-1179f0d567cf tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 588.271273] env[63345]: DEBUG nova.compute.manager [-] [instance: 1f595aef-799f-4ca4-be91-e95ef056926c] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 588.271369] env[63345]: DEBUG nova.network.neutron [-] [instance: 1f595aef-799f-4ca4-be91-e95ef056926c] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 588.289433] env[63345]: DEBUG nova.network.neutron [-] [instance: 1f595aef-799f-4ca4-be91-e95ef056926c] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 588.465555] env[63345]: DEBUG oslo_concurrency.lockutils [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.079s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 588.466473] env[63345]: ERROR nova.compute.manager [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 175ede99-48e4-43dc-b563-140f42244c97] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 6afb7501-c3fd-4ca2-b6a8-f228b9a81260, please check neutron logs for more information. [ 588.466473] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] Traceback (most recent call last): [ 588.466473] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 588.466473] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] self.driver.spawn(context, instance, image_meta, [ 588.466473] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 588.466473] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] self._vmops.spawn(context, instance, image_meta, injected_files, [ 588.466473] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 588.466473] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] vm_ref = self.build_virtual_machine(instance, [ 588.466473] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 588.466473] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] vif_infos = vmwarevif.get_vif_info(self._session, [ 588.466473] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 588.466931] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] for vif in network_info: [ 588.466931] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 588.466931] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] return self._sync_wrapper(fn, *args, **kwargs) [ 588.466931] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 588.466931] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] self.wait() [ 588.466931] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 588.466931] env[63345]: ERROR 
nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] self[:] = self._gt.wait() [ 588.466931] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 588.466931] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] return self._exit_event.wait() [ 588.466931] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 588.466931] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] result = hub.switch() [ 588.466931] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 588.466931] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] return self.greenlet.switch() [ 588.467438] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 588.467438] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] result = function(*args, **kwargs) [ 588.467438] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 588.467438] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] return func(*args, **kwargs) [ 588.467438] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 588.467438] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] raise e [ 588.467438] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 588.467438] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] nwinfo = self.network_api.allocate_for_instance( [ 588.467438] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 588.467438] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] created_port_ids = self._update_ports_for_instance( [ 588.467438] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 588.467438] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] with excutils.save_and_reraise_exception(): [ 588.467438] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 588.467922] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] self.force_reraise() [ 588.467922] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 
200, in force_reraise [ 588.467922] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] raise self.value [ 588.467922] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 588.467922] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] updated_port = self._update_port( [ 588.467922] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 588.467922] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] _ensure_no_port_binding_failure(port) [ 588.467922] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 588.467922] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] raise exception.PortBindingFailed(port_id=port['id']) [ 588.467922] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] nova.exception.PortBindingFailed: Binding failed for port 6afb7501-c3fd-4ca2-b6a8-f228b9a81260, please check neutron logs for more information. [ 588.467922] env[63345]: ERROR nova.compute.manager [instance: 175ede99-48e4-43dc-b563-140f42244c97] [ 588.468263] env[63345]: DEBUG nova.compute.utils [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 175ede99-48e4-43dc-b563-140f42244c97] Binding failed for port 6afb7501-c3fd-4ca2-b6a8-f228b9a81260, please check neutron logs for more information. {{(pid=63345) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 588.468263] env[63345]: DEBUG oslo_concurrency.lockutils [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 30.976s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 588.468401] env[63345]: DEBUG nova.objects.instance [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63345) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 588.470902] env[63345]: DEBUG nova.compute.manager [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 175ede99-48e4-43dc-b563-140f42244c97] Build of instance 175ede99-48e4-43dc-b563-140f42244c97 was re-scheduled: Binding failed for port 6afb7501-c3fd-4ca2-b6a8-f228b9a81260, please check neutron logs for more information. 
{{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 588.471350] env[63345]: DEBUG nova.compute.manager [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 175ede99-48e4-43dc-b563-140f42244c97] Unplugging VIFs for instance {{(pid=63345) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 588.472025] env[63345]: DEBUG oslo_concurrency.lockutils [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Acquiring lock "refresh_cache-175ede99-48e4-43dc-b563-140f42244c97" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 588.472025] env[63345]: DEBUG oslo_concurrency.lockutils [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Acquired lock "refresh_cache-175ede99-48e4-43dc-b563-140f42244c97" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 588.472146] env[63345]: DEBUG nova.network.neutron [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 175ede99-48e4-43dc-b563-140f42244c97] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 588.564276] env[63345]: DEBUG nova.compute.manager [None req-28a9f8ee-561e-42c1-a81b-2f1cf60def7e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 588.612261] env[63345]: INFO nova.compute.manager [None req-d45e143f-5f98-4d14-bf6e-59c328ff4693 tempest-ServersWithSpecificFlavorTestJSON-889674138 tempest-ServersWithSpecificFlavorTestJSON-889674138-project-member] [instance: d467124f-0b2b-4108-90d1-40f149e55ff0] Took 1.03 seconds to deallocate network for instance. [ 588.792864] env[63345]: DEBUG nova.network.neutron [-] [instance: 1f595aef-799f-4ca4-be91-e95ef056926c] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 588.994024] env[63345]: DEBUG nova.network.neutron [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 175ede99-48e4-43dc-b563-140f42244c97] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 589.092796] env[63345]: DEBUG oslo_concurrency.lockutils [None req-28a9f8ee-561e-42c1-a81b-2f1cf60def7e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 589.098923] env[63345]: DEBUG nova.network.neutron [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 175ede99-48e4-43dc-b563-140f42244c97] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 589.297041] env[63345]: INFO nova.compute.manager [-] [instance: 1f595aef-799f-4ca4-be91-e95ef056926c] Took 1.03 seconds to deallocate network for instance. [ 589.482129] env[63345]: DEBUG oslo_concurrency.lockutils [None req-de0c34dc-abd6-47dd-aba5-2c7e6ab4e7c9 tempest-ServersAdmin275Test-500933732 tempest-ServersAdmin275Test-500933732-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.014s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 589.482888] env[63345]: DEBUG oslo_concurrency.lockutils [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 30.553s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 589.604689] env[63345]: DEBUG oslo_concurrency.lockutils [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Releasing lock "refresh_cache-175ede99-48e4-43dc-b563-140f42244c97" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 589.604689] env[63345]: DEBUG nova.compute.manager [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=63345) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 589.604795] env[63345]: DEBUG nova.compute.manager [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 175ede99-48e4-43dc-b563-140f42244c97] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 589.604974] env[63345]: DEBUG nova.network.neutron [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 175ede99-48e4-43dc-b563-140f42244c97] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 589.620879] env[63345]: DEBUG nova.network.neutron [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 175ede99-48e4-43dc-b563-140f42244c97] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 589.645289] env[63345]: INFO nova.scheduler.client.report [None req-d45e143f-5f98-4d14-bf6e-59c328ff4693 tempest-ServersWithSpecificFlavorTestJSON-889674138 tempest-ServersWithSpecificFlavorTestJSON-889674138-project-member] Deleted allocations for instance d467124f-0b2b-4108-90d1-40f149e55ff0 [ 589.682146] env[63345]: DEBUG oslo_concurrency.lockutils [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Acquiring lock "abc81fa5-78a9-48b1-a49e-2faffddf2411" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 589.682379] env[63345]: DEBUG oslo_concurrency.lockutils [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Lock "abc81fa5-78a9-48b1-a49e-2faffddf2411" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 589.805774] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dae5937f-11fc-4e77-b96b-1179f0d567cf tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 590.123478] env[63345]: DEBUG nova.network.neutron [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 175ede99-48e4-43dc-b563-140f42244c97] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 590.155348] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d45e143f-5f98-4d14-bf6e-59c328ff4693 tempest-ServersWithSpecificFlavorTestJSON-889674138 tempest-ServersWithSpecificFlavorTestJSON-889674138-project-member] Lock "d467124f-0b2b-4108-90d1-40f149e55ff0" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 60.449s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 590.439469] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83d56f56-84ff-4a55-851d-31c40db7fd11 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.449122] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dffe7c46-6a64-452c-85f8-c83ba6f2ac30 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.485391] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f49991c5-d289-4bdb-9b7c-1ce35be0e17e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.493650] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8300c7c-8534-4120-95ba-d864eafe1795 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.508261] env[63345]: DEBUG nova.compute.provider_tree [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 590.626334] env[63345]: INFO nova.compute.manager [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 175ede99-48e4-43dc-b563-140f42244c97] Took 1.02 seconds to deallocate network for instance. [ 590.659775] env[63345]: DEBUG nova.compute.manager [None req-40c46a23-51d1-4974-99d7-63d301a98173 tempest-ServerActionsTestJSON-1881851479 tempest-ServerActionsTestJSON-1881851479-project-member] [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] Starting instance... 
{{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 591.015211] env[63345]: DEBUG nova.scheduler.client.report [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 591.192749] env[63345]: DEBUG oslo_concurrency.lockutils [None req-40c46a23-51d1-4974-99d7-63d301a98173 tempest-ServerActionsTestJSON-1881851479 tempest-ServerActionsTestJSON-1881851479-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 591.519590] env[63345]: DEBUG oslo_concurrency.lockutils [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.037s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 591.520273] env[63345]: ERROR nova.compute.manager [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port e9f0dd1d-5e7c-462c-94ba-afe000098ad6, please check neutron logs for more information. 
[ 591.520273] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] Traceback (most recent call last): [ 591.520273] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 591.520273] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] self.driver.spawn(context, instance, image_meta, [ 591.520273] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 591.520273] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] self._vmops.spawn(context, instance, image_meta, injected_files, [ 591.520273] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 591.520273] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] vm_ref = self.build_virtual_machine(instance, [ 591.520273] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 591.520273] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] vif_infos = vmwarevif.get_vif_info(self._session, [ 591.520273] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 591.520644] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] for vif in network_info: [ 591.520644] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 591.520644] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] return self._sync_wrapper(fn, *args, **kwargs) [ 591.520644] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 591.520644] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] self.wait() [ 591.520644] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 591.520644] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] self[:] = self._gt.wait() [ 591.520644] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 591.520644] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] return self._exit_event.wait() [ 591.520644] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 591.520644] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] result = hub.switch() [ 591.520644] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
591.520644] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] return self.greenlet.switch() [ 591.520999] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 591.520999] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] result = function(*args, **kwargs) [ 591.520999] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 591.520999] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] return func(*args, **kwargs) [ 591.520999] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 591.520999] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] raise e [ 591.520999] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 591.520999] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] nwinfo = self.network_api.allocate_for_instance( [ 591.520999] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 591.520999] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] created_port_ids = self._update_ports_for_instance( [ 591.520999] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 591.520999] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] with excutils.save_and_reraise_exception(): [ 591.520999] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 591.521480] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] self.force_reraise() [ 591.521480] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 591.521480] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] raise self.value [ 591.521480] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 591.521480] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] updated_port = self._update_port( [ 591.521480] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 591.521480] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] _ensure_no_port_binding_failure(port) [ 591.521480] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 591.521480] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] raise exception.PortBindingFailed(port_id=port['id']) [ 591.521480] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] nova.exception.PortBindingFailed: Binding failed for port e9f0dd1d-5e7c-462c-94ba-afe000098ad6, please check neutron logs for more information. [ 591.521480] env[63345]: ERROR nova.compute.manager [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] [ 591.521788] env[63345]: DEBUG nova.compute.utils [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] Binding failed for port e9f0dd1d-5e7c-462c-94ba-afe000098ad6, please check neutron logs for more information. {{(pid=63345) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 591.522690] env[63345]: DEBUG oslo_concurrency.lockutils [None req-55dacc43-d451-450e-98a3-0625a2b4f048 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.358s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 591.522914] env[63345]: DEBUG nova.objects.instance [None req-55dacc43-d451-450e-98a3-0625a2b4f048 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Lazy-loading 'resources' on Instance uuid 5ef55aca-0714-4b34-85f2-b6d53f97c2d0 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 591.524659] env[63345]: DEBUG nova.compute.manager [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] Build of instance d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc was re-scheduled: Binding failed for port e9f0dd1d-5e7c-462c-94ba-afe000098ad6, please check neutron logs for more information. 
{{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 591.525091] env[63345]: DEBUG nova.compute.manager [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] Unplugging VIFs for instance {{(pid=63345) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 591.525320] env[63345]: DEBUG oslo_concurrency.lockutils [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Acquiring lock "refresh_cache-d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 591.525470] env[63345]: DEBUG oslo_concurrency.lockutils [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Acquired lock "refresh_cache-d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 591.525627] env[63345]: DEBUG nova.network.neutron [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 591.675774] env[63345]: INFO nova.scheduler.client.report [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Deleted allocations for instance 175ede99-48e4-43dc-b563-140f42244c97 [ 592.057675] env[63345]: DEBUG nova.network.neutron [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 592.194133] env[63345]: DEBUG oslo_concurrency.lockutils [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Lock "175ede99-48e4-43dc-b563-140f42244c97" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 59.773s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 592.225602] env[63345]: DEBUG nova.network.neutron [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 592.510963] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7558eb8e-df1c-4e6a-b2ed-937bcdae1536 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.518949] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-821c0841-1846-4552-b44f-ee41d198da0f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.553097] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b2321d3-9608-412e-8c6d-8e5dc5fa0613 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.562362] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-553c62ae-5451-4946-aa97-9a6bb5b6de46 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.578861] env[63345]: DEBUG nova.compute.provider_tree [None req-55dacc43-d451-450e-98a3-0625a2b4f048 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 592.697442] env[63345]: DEBUG nova.compute.manager [None req-a2c11686-f3e2-427b-a111-6c510c529d42 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 592.730526] env[63345]: DEBUG oslo_concurrency.lockutils [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Releasing lock "refresh_cache-d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 592.730779] env[63345]: DEBUG nova.compute.manager [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=63345) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 592.730950] env[63345]: DEBUG nova.compute.manager [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 592.731159] env[63345]: DEBUG nova.network.neutron [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 592.759508] env[63345]: DEBUG nova.network.neutron [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 593.081596] env[63345]: DEBUG nova.scheduler.client.report [None req-55dacc43-d451-450e-98a3-0625a2b4f048 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 593.224239] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a2c11686-f3e2-427b-a111-6c510c529d42 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 593.261959] env[63345]: DEBUG nova.network.neutron [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 593.589060] env[63345]: DEBUG oslo_concurrency.lockutils [None req-55dacc43-d451-450e-98a3-0625a2b4f048 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.066s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 593.595299] env[63345]: DEBUG oslo_concurrency.lockutils [None req-994a51d0-00b8-426a-996a-32235561cbcb tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.990s {{(pid=63345) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 593.600349] env[63345]: INFO nova.compute.claims [None req-994a51d0-00b8-426a-996a-32235561cbcb tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 593.644487] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Acquiring lock "78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 593.644857] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Lock "78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 593.655989] env[63345]: INFO nova.scheduler.client.report [None req-55dacc43-d451-450e-98a3-0625a2b4f048 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Deleted allocations for instance 5ef55aca-0714-4b34-85f2-b6d53f97c2d0 [ 593.722057] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Acquiring lock "34e0234c-36c4-4878-979b-46f045bd1785" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 593.722317] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Lock "34e0234c-36c4-4878-979b-46f045bd1785" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 593.765568] env[63345]: INFO nova.compute.manager [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc] Took 1.03 seconds to deallocate network for instance. 
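[editor's note] The "Inventory has not changed" records above include the full inventory reported for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57. As a quick sanity check (an illustration added here, not something the log computes), the usable capacity placement conventionally works with is (total - reserved) * allocation_ratio:

    # Illustrative only: recompute the usable capacity implied by the
    # inventory dict logged above, assuming placement's usual formula
    #   capacity = (total - reserved) * allocation_ratio
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: {capacity:g}")
    # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400
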
[ 594.172169] env[63345]: DEBUG oslo_concurrency.lockutils [None req-55dacc43-d451-450e-98a3-0625a2b4f048 tempest-ServersAdmin275Test-1474130211 tempest-ServersAdmin275Test-1474130211-project-member] Lock "5ef55aca-0714-4b34-85f2-b6d53f97c2d0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.305s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 595.040630] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6ad3ddb-ab6f-4288-805a-cc0cf7e46cd2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.051213] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4809a050-4d33-46f0-9f7c-2fd25f079bc1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.087158] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f20f323-4a46-495a-a73d-ef352a919aed {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.095908] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65536fff-e758-4ce0-805b-9118b91650a3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.112403] env[63345]: DEBUG nova.compute.provider_tree [None req-994a51d0-00b8-426a-996a-32235561cbcb tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 595.617075] env[63345]: DEBUG nova.scheduler.client.report [None req-994a51d0-00b8-426a-996a-32235561cbcb tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 595.824266] env[63345]: DEBUG oslo_concurrency.lockutils [None req-086d2aaf-107f-4333-b713-a3f957a970ef tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Lock "d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 63.364s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 595.825086] env[63345]: Traceback (most recent call last): [ 595.825086] env[63345]: File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 595.825086] env[63345]: self.driver.spawn(context, instance, image_meta, [ 595.825086] env[63345]: File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 595.825086] env[63345]: 
self._vmops.spawn(context, instance, image_meta, injected_files, [ 595.825086] env[63345]: File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 595.825086] env[63345]: vm_ref = self.build_virtual_machine(instance, [ 595.825086] env[63345]: File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 595.825086] env[63345]: vif_infos = vmwarevif.get_vif_info(self._session, [ 595.825086] env[63345]: File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 595.825086] env[63345]: for vif in network_info: [ 595.825086] env[63345]: File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 595.825086] env[63345]: return self._sync_wrapper(fn, *args, **kwargs) [ 595.825086] env[63345]: File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 595.825086] env[63345]: self.wait() [ 595.825086] env[63345]: File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 595.825086] env[63345]: self[:] = self._gt.wait() [ 595.825086] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 595.825086] env[63345]: return self._exit_event.wait() [ 595.825086] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 595.825086] env[63345]: result = hub.switch() [ 595.825086] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 595.825086] env[63345]: return self.greenlet.switch() [ 595.825086] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 595.825086] env[63345]: result = function(*args, **kwargs) [ 595.825086] env[63345]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 595.825086] env[63345]: return func(*args, **kwargs) [ 595.825086] env[63345]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 595.825086] env[63345]: raise e [ 595.825086] env[63345]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 595.825086] env[63345]: nwinfo = self.network_api.allocate_for_instance( [ 595.825086] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 595.825086] env[63345]: created_port_ids = self._update_ports_for_instance( [ 595.825872] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 595.825872] env[63345]: with excutils.save_and_reraise_exception(): [ 595.825872] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 595.825872] env[63345]: self.force_reraise() [ 595.825872] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 595.825872] env[63345]: raise self.value [ 595.825872] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 595.825872] env[63345]: updated_port = self._update_port( [ 595.825872] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 595.825872] env[63345]: _ensure_no_port_binding_failure(port) [ 595.825872] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 595.825872] env[63345]: raise exception.PortBindingFailed(port_id=port['id']) [ 595.825872] env[63345]: nova.exception.PortBindingFailed: 
Binding failed for port e9f0dd1d-5e7c-462c-94ba-afe000098ad6, please check neutron logs for more information. [ 595.825872] env[63345]: During handling of the above exception, another exception occurred: [ 595.825872] env[63345]: Traceback (most recent call last): [ 595.825872] env[63345]: File "/opt/stack/nova/nova/compute/manager.py", line 2462, in _do_build_and_run_instance [ 595.825872] env[63345]: self._build_and_run_instance(context, instance, image, [ 595.825872] env[63345]: File "/opt/stack/nova/nova/compute/manager.py", line 2754, in _build_and_run_instance [ 595.825872] env[63345]: raise exception.RescheduledException( [ 595.825872] env[63345]: nova.exception.RescheduledException: Build of instance d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc was re-scheduled: Binding failed for port e9f0dd1d-5e7c-462c-94ba-afe000098ad6, please check neutron logs for more information. [ 595.825872] env[63345]: During handling of the above exception, another exception occurred: [ 595.825872] env[63345]: Traceback (most recent call last): [ 595.825872] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenpool.py", line 87, in _spawn_n_impl [ 595.825872] env[63345]: func(*args, **kwargs) [ 595.825872] env[63345]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 595.825872] env[63345]: return func(*args, **kwargs) [ 595.825872] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 595.825872] env[63345]: return f(*args, **kwargs) [ 595.826487] env[63345]: File "/opt/stack/nova/nova/compute/manager.py", line 2353, in _locked_do_build_and_run_instance [ 595.826487] env[63345]: result = self._do_build_and_run_instance(*args, **kwargs) [ 595.826487] env[63345]: File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 595.826487] env[63345]: with excutils.save_and_reraise_exception(): [ 595.826487] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 595.826487] env[63345]: self.force_reraise() [ 595.826487] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 595.826487] env[63345]: raise self.value [ 595.826487] env[63345]: File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 595.826487] env[63345]: return f(self, context, *args, **kw) [ 595.826487] env[63345]: File "/opt/stack/nova/nova/compute/manager.py", line 168, in decorated_function [ 595.826487] env[63345]: with excutils.save_and_reraise_exception(): [ 595.826487] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 595.826487] env[63345]: self.force_reraise() [ 595.826487] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 595.826487] env[63345]: raise self.value [ 595.826487] env[63345]: File "/opt/stack/nova/nova/compute/manager.py", line 159, in decorated_function [ 595.826487] env[63345]: return function(self, context, *args, **kwargs) [ 595.826487] env[63345]: File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 595.826487] env[63345]: return function(self, context, *args, **kwargs) [ 595.826487] env[63345]: File "/opt/stack/nova/nova/compute/manager.py", line 205, in decorated_function [ 595.826487] env[63345]: return function(self, context, *args, **kwargs) [ 595.826487] env[63345]: File 
"/opt/stack/nova/nova/compute/manager.py", line 2497, in _do_build_and_run_instance [ 595.826487] env[63345]: instance.save() [ 595.826487] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_versionedobjects/base.py", line 209, in wrapper [ 595.826487] env[63345]: updates, result = self.indirection_api.object_action( [ 595.826487] env[63345]: File "/opt/stack/nova/nova/conductor/rpcapi.py", line 247, in object_action [ 595.826487] env[63345]: return cctxt.call(context, 'object_action', objinst=objinst, [ 595.826487] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/client.py", line 190, in call [ 595.826487] env[63345]: result = self.transport._send( [ 595.827201] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/transport.py", line 123, in _send [ 595.827201] env[63345]: return self._driver.send(target, ctxt, message, [ 595.827201] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 788, in send [ 595.827201] env[63345]: return self._send(target, ctxt, message, wait_for_reply, timeout, [ 595.827201] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 780, in _send [ 595.827201] env[63345]: raise result [ 595.827201] env[63345]: nova.exception_Remote.InstanceNotFound_Remote: Instance d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc could not be found. [ 595.827201] env[63345]: Traceback (most recent call last): [ 595.827201] env[63345]: File "/opt/stack/nova/nova/conductor/manager.py", line 142, in _object_dispatch [ 595.827201] env[63345]: return getattr(target, method)(*args, **kwargs) [ 595.827201] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_versionedobjects/base.py", line 226, in wrapper [ 595.827201] env[63345]: return fn(self, *args, **kwargs) [ 595.827201] env[63345]: File "/opt/stack/nova/nova/objects/instance.py", line 878, in save [ 595.827201] env[63345]: old_ref, inst_ref = db.instance_update_and_get_original( [ 595.827201] env[63345]: File "/opt/stack/nova/nova/db/utils.py", line 35, in wrapper [ 595.827201] env[63345]: return f(*args, **kwargs) [ 595.827201] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/api.py", line 144, in wrapper [ 595.827201] env[63345]: with excutils.save_and_reraise_exception() as ectxt: [ 595.827201] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 595.827201] env[63345]: self.force_reraise() [ 595.827201] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 595.827201] env[63345]: raise self.value [ 595.827201] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/api.py", line 142, in wrapper [ 595.827201] env[63345]: return f(*args, **kwargs) [ 595.827201] env[63345]: File "/opt/stack/nova/nova/db/main/api.py", line 207, in wrapper [ 595.827201] env[63345]: return f(context, *args, **kwargs) [ 595.827201] env[63345]: File "/opt/stack/nova/nova/db/main/api.py", line 2283, in instance_update_and_get_original [ 595.827201] env[63345]: instance_ref = _instance_get_by_uuid(context, instance_uuid, [ 595.827201] env[63345]: File "/opt/stack/nova/nova/db/main/api.py", line 1405, in _instance_get_by_uuid [ 595.827878] env[63345]: raise exception.InstanceNotFound(instance_id=uuid) [ 595.827878] env[63345]: nova.exception.InstanceNotFound: Instance 
d38a25d2-27c1-4c67-a70a-ca9b1e6eefbc could not be found. [ 596.128600] env[63345]: DEBUG oslo_concurrency.lockutils [None req-994a51d0-00b8-426a-996a-32235561cbcb tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.533s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 596.129211] env[63345]: DEBUG nova.compute.manager [None req-994a51d0-00b8-426a-996a-32235561cbcb tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] Start building networks asynchronously for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 596.131962] env[63345]: DEBUG oslo_concurrency.lockutils [None req-55f4b490-cb76-4231-a948-849d24288fed tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 19.277s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 596.330228] env[63345]: DEBUG nova.compute.manager [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: ee31689b-bf0b-4737-86c7-5451c763e603] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 596.644220] env[63345]: DEBUG nova.compute.utils [None req-994a51d0-00b8-426a-996a-32235561cbcb tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 596.649019] env[63345]: DEBUG nova.compute.manager [None req-994a51d0-00b8-426a-996a-32235561cbcb tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] Allocating IP information in the background. 
{{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 596.649019] env[63345]: DEBUG nova.network.neutron [None req-994a51d0-00b8-426a-996a-32235561cbcb tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 596.746825] env[63345]: DEBUG nova.policy [None req-994a51d0-00b8-426a-996a-32235561cbcb tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f23f5478fa3447419476495cda21568e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cffff9ec3b5b4bf8b400039c7d461ed8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 596.850318] env[63345]: DEBUG oslo_concurrency.lockutils [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 597.132924] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-798077ec-9d86-4e5a-a51a-58a70e2a4260 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.141846] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f9e2f73-8c65-44f5-9d45-56fcf6fe81f1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.148808] env[63345]: DEBUG nova.compute.manager [None req-994a51d0-00b8-426a-996a-32235561cbcb tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] Start building block device mappings for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 597.181142] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f2c86bb-8649-4433-a8d6-17c23ef51cb6 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.190131] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c9e2be1-6d69-475c-933d-2e3cc517c0c8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.204751] env[63345]: DEBUG nova.compute.provider_tree [None req-55f4b490-cb76-4231-a948-849d24288fed tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 597.207722] env[63345]: DEBUG nova.network.neutron [None req-994a51d0-00b8-426a-996a-32235561cbcb tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] Successfully created port: bb0b3f3b-ae46-452c-a6d5-2f2f581a8e08 {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 597.711447] env[63345]: DEBUG nova.scheduler.client.report [None req-55f4b490-cb76-4231-a948-849d24288fed tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 598.128494] env[63345]: DEBUG nova.compute.manager [req-15b6c20a-cf72-4ed1-af4a-49a8a1dad98a req-eda7524a-d3c6-4162-a175-55ec65b3b77f service nova] [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] Received event network-changed-bb0b3f3b-ae46-452c-a6d5-2f2f581a8e08 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 598.128494] env[63345]: DEBUG nova.compute.manager [req-15b6c20a-cf72-4ed1-af4a-49a8a1dad98a req-eda7524a-d3c6-4162-a175-55ec65b3b77f service nova] [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] Refreshing instance network info cache due to event network-changed-bb0b3f3b-ae46-452c-a6d5-2f2f581a8e08. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 598.128494] env[63345]: DEBUG oslo_concurrency.lockutils [req-15b6c20a-cf72-4ed1-af4a-49a8a1dad98a req-eda7524a-d3c6-4162-a175-55ec65b3b77f service nova] Acquiring lock "refresh_cache-1a54db9b-0482-4038-a505-46447f0c33ef" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 598.128494] env[63345]: DEBUG oslo_concurrency.lockutils [req-15b6c20a-cf72-4ed1-af4a-49a8a1dad98a req-eda7524a-d3c6-4162-a175-55ec65b3b77f service nova] Acquired lock "refresh_cache-1a54db9b-0482-4038-a505-46447f0c33ef" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 598.128494] env[63345]: DEBUG nova.network.neutron [req-15b6c20a-cf72-4ed1-af4a-49a8a1dad98a req-eda7524a-d3c6-4162-a175-55ec65b3b77f service nova] [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] Refreshing network info cache for port bb0b3f3b-ae46-452c-a6d5-2f2f581a8e08 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 598.192139] env[63345]: DEBUG nova.compute.manager [None req-994a51d0-00b8-426a-996a-32235561cbcb tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] Start spawning the instance on the hypervisor. {{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 598.220300] env[63345]: DEBUG oslo_concurrency.lockutils [None req-55f4b490-cb76-4231-a948-849d24288fed tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.086s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 598.220300] env[63345]: ERROR nova.compute.manager [None req-55f4b490-cb76-4231-a948-849d24288fed tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 1072c48c-5de1-46dc-a4ec-1846ce775d97, please check neutron logs for more information. 
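[editor's note] The PortBindingFailed errors above and below all end in the same frame, _ensure_no_port_binding_failure raising exception.PortBindingFailed(port_id=port['id']). The following is a simplified, standalone re-creation of that check (an assumption: the real Nova helper keys off the port's 'binding:vif_type'; only the behaviour visible in the traceback is reproduced here):

    # Sketch of the check that turns a failed neutron binding into the
    # PortBindingFailed errors seen in this log.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                f"Binding failed for port {port_id}, "
                "please check neutron logs for more information.")

    def ensure_no_port_binding_failure(port: dict) -> None:
        # Neutron marks a port it could not bind with a failed vif_type;
        # Nova raises so the build is aborted and rescheduled.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    try:
        ensure_no_port_binding_failure(
            {'id': '1072c48c-5de1-46dc-a4ec-1846ce775d97',
             'binding:vif_type': 'binding_failed'})
    except PortBindingFailed as exc:
        print(exc)
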
[ 598.220300] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] Traceback (most recent call last): [ 598.220300] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 598.220300] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] self.driver.spawn(context, instance, image_meta, [ 598.220300] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 598.220300] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 598.220300] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 598.220300] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] vm_ref = self.build_virtual_machine(instance, [ 598.220636] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 598.220636] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] vif_infos = vmwarevif.get_vif_info(self._session, [ 598.220636] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 598.220636] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] for vif in network_info: [ 598.220636] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 598.220636] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] return self._sync_wrapper(fn, *args, **kwargs) [ 598.220636] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 598.220636] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] self.wait() [ 598.220636] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 598.220636] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] self[:] = self._gt.wait() [ 598.220636] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 598.220636] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] return self._exit_event.wait() [ 598.220636] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 598.220970] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] result = hub.switch() [ 598.220970] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
598.220970] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] return self.greenlet.switch() [ 598.220970] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 598.220970] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] result = function(*args, **kwargs) [ 598.220970] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 598.220970] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] return func(*args, **kwargs) [ 598.220970] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 598.220970] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] raise e [ 598.220970] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 598.220970] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] nwinfo = self.network_api.allocate_for_instance( [ 598.220970] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 598.220970] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] created_port_ids = self._update_ports_for_instance( [ 598.222126] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 598.222126] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] with excutils.save_and_reraise_exception(): [ 598.222126] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 598.222126] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] self.force_reraise() [ 598.222126] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 598.222126] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] raise self.value [ 598.222126] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 598.222126] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] updated_port = self._update_port( [ 598.222126] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 598.222126] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] _ensure_no_port_binding_failure(port) [ 598.222126] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 598.222126] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] raise exception.PortBindingFailed(port_id=port['id']) [ 598.222423] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] nova.exception.PortBindingFailed: Binding failed for port 1072c48c-5de1-46dc-a4ec-1846ce775d97, please check neutron logs for more information. [ 598.222423] env[63345]: ERROR nova.compute.manager [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] [ 598.222423] env[63345]: DEBUG nova.compute.utils [None req-55f4b490-cb76-4231-a948-849d24288fed tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] Binding failed for port 1072c48c-5de1-46dc-a4ec-1846ce775d97, please check neutron logs for more information. {{(pid=63345) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 598.226288] env[63345]: DEBUG nova.virt.hardware [None req-994a51d0-00b8-426a-996a-32235561cbcb tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 598.226288] env[63345]: DEBUG nova.virt.hardware [None req-994a51d0-00b8-426a-996a-32235561cbcb tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 598.226288] env[63345]: DEBUG nova.virt.hardware [None req-994a51d0-00b8-426a-996a-32235561cbcb tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 598.226474] env[63345]: DEBUG nova.virt.hardware [None req-994a51d0-00b8-426a-996a-32235561cbcb tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 598.226474] env[63345]: DEBUG nova.virt.hardware [None req-994a51d0-00b8-426a-996a-32235561cbcb tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 598.226474] env[63345]: DEBUG nova.virt.hardware [None req-994a51d0-00b8-426a-996a-32235561cbcb tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Chose sockets=0, cores=0, threads=0; limits were 
sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 598.226938] env[63345]: DEBUG nova.virt.hardware [None req-994a51d0-00b8-426a-996a-32235561cbcb tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 598.230021] env[63345]: DEBUG nova.virt.hardware [None req-994a51d0-00b8-426a-996a-32235561cbcb tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 598.230021] env[63345]: DEBUG nova.virt.hardware [None req-994a51d0-00b8-426a-996a-32235561cbcb tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 598.230021] env[63345]: DEBUG nova.virt.hardware [None req-994a51d0-00b8-426a-996a-32235561cbcb tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 598.230021] env[63345]: DEBUG nova.virt.hardware [None req-994a51d0-00b8-426a-996a-32235561cbcb tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 598.230021] env[63345]: DEBUG oslo_concurrency.lockutils [None req-230142a3-84f1-43a5-9eb9-4bad4e3f22a1 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.850s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 598.230212] env[63345]: INFO nova.compute.claims [None req-230142a3-84f1-43a5-9eb9-4bad4e3f22a1 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 598.233353] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-208d0128-eeb8-4f9e-9a93-42f545ce5989 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.237338] env[63345]: DEBUG nova.compute.manager [None req-55f4b490-cb76-4231-a948-849d24288fed tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] Build of instance 6e8ef6b9-4684-4685-949a-2e2868aa3fb7 was re-scheduled: Binding failed for port 1072c48c-5de1-46dc-a4ec-1846ce775d97, please check neutron logs for more information. 
{{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 598.238067] env[63345]: DEBUG nova.compute.manager [None req-55f4b490-cb76-4231-a948-849d24288fed tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] Unplugging VIFs for instance {{(pid=63345) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 598.238399] env[63345]: DEBUG oslo_concurrency.lockutils [None req-55f4b490-cb76-4231-a948-849d24288fed tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Acquiring lock "refresh_cache-6e8ef6b9-4684-4685-949a-2e2868aa3fb7" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 598.238645] env[63345]: DEBUG oslo_concurrency.lockutils [None req-55f4b490-cb76-4231-a948-849d24288fed tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Acquired lock "refresh_cache-6e8ef6b9-4684-4685-949a-2e2868aa3fb7" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 598.238895] env[63345]: DEBUG nova.network.neutron [None req-55f4b490-cb76-4231-a948-849d24288fed tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 598.246109] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c473105-2f22-4466-91bf-901f1a67e765 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.355070] env[63345]: ERROR nova.compute.manager [None req-994a51d0-00b8-426a-996a-32235561cbcb tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port bb0b3f3b-ae46-452c-a6d5-2f2f581a8e08, please check neutron logs for more information. 
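[editor's note] The repeated excutils frames in the tracebacks above (__exit__, force_reraise, raise self.value) come from oslo.utils' save_and_reraise_exception context manager: it lets cleanup run while an exception is pending and then re-raises the original. A minimal usage sketch follows; the update/cleanup callables here are made up for illustration:

    from oslo_utils import excutils

    def update_port_or_cleanup(update_port, cleanup):
        try:
            return update_port()
        except Exception:
            with excutils.save_and_reraise_exception():
                # Runs while the original exception is pending; on exit the
                # context manager calls force_reraise(), which is why
                # "raise self.value" appears in every traceback above.
                cleanup()

    def failing_update():
        raise ValueError("binding failed")

    try:
        update_port_or_cleanup(failing_update, lambda: print("cleaning up ports"))
    except ValueError as exc:
        print("re-raised:", exc)
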
[ 598.355070] env[63345]: ERROR nova.compute.manager Traceback (most recent call last): [ 598.355070] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 598.355070] env[63345]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 598.355070] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 598.355070] env[63345]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 598.355070] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 598.355070] env[63345]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 598.355070] env[63345]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 598.355070] env[63345]: ERROR nova.compute.manager self.force_reraise() [ 598.355070] env[63345]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 598.355070] env[63345]: ERROR nova.compute.manager raise self.value [ 598.355070] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 598.355070] env[63345]: ERROR nova.compute.manager updated_port = self._update_port( [ 598.355070] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 598.355070] env[63345]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 598.355528] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 598.355528] env[63345]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 598.355528] env[63345]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port bb0b3f3b-ae46-452c-a6d5-2f2f581a8e08, please check neutron logs for more information. 
[ 598.355528] env[63345]: ERROR nova.compute.manager [ 598.357031] env[63345]: Traceback (most recent call last): [ 598.357031] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 598.357031] env[63345]: listener.cb(fileno) [ 598.357031] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 598.357031] env[63345]: result = function(*args, **kwargs) [ 598.357031] env[63345]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 598.357031] env[63345]: return func(*args, **kwargs) [ 598.357031] env[63345]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 598.357031] env[63345]: raise e [ 598.357031] env[63345]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 598.357031] env[63345]: nwinfo = self.network_api.allocate_for_instance( [ 598.357031] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 598.357031] env[63345]: created_port_ids = self._update_ports_for_instance( [ 598.357031] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 598.357031] env[63345]: with excutils.save_and_reraise_exception(): [ 598.357031] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 598.357031] env[63345]: self.force_reraise() [ 598.357031] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 598.357031] env[63345]: raise self.value [ 598.357031] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 598.357031] env[63345]: updated_port = self._update_port( [ 598.357031] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 598.357031] env[63345]: _ensure_no_port_binding_failure(port) [ 598.357031] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 598.357031] env[63345]: raise exception.PortBindingFailed(port_id=port['id']) [ 598.357031] env[63345]: nova.exception.PortBindingFailed: Binding failed for port bb0b3f3b-ae46-452c-a6d5-2f2f581a8e08, please check neutron logs for more information. [ 598.357031] env[63345]: Removing descriptor: 16 [ 598.358902] env[63345]: ERROR nova.compute.manager [None req-994a51d0-00b8-426a-996a-32235561cbcb tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port bb0b3f3b-ae46-452c-a6d5-2f2f581a8e08, please check neutron logs for more information. 
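[editor's note] The traceback ending in "Removing descriptor: 16" above belongs to the greenthread that ran _allocate_network_async, while the "Instance failed to spawn" error that follows surfaces the same PortBindingFailed later, inside "for vif in network_info". That is the async pattern visible in these frames: allocation is spawned early and only joined (and its exception re-raised) when the result is first consumed. A minimal eventlet sketch of that pattern, with the allocation failure simulated:

    import eventlet

    class PortBindingFailed(Exception):
        pass

    def allocate_network():
        # Simulated async allocation failing the way port binding did here.
        raise PortBindingFailed("Binding failed for port <id>")

    gt = eventlet.spawn(allocate_network)   # kicked off early in the build

    # ... the build continues with other work ...

    try:
        network_info = gt.wait()            # first use joins the greenthread
                                            # and re-raises its exception
    except PortBindingFailed as exc:
        print("surfaces at spawn time:", exc)
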
[ 598.358902] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] Traceback (most recent call last): [ 598.358902] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 598.358902] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] yield resources [ 598.358902] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 598.358902] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] self.driver.spawn(context, instance, image_meta, [ 598.358902] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 598.358902] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] self._vmops.spawn(context, instance, image_meta, injected_files, [ 598.358902] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 598.358902] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] vm_ref = self.build_virtual_machine(instance, [ 598.358902] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 598.359291] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] vif_infos = vmwarevif.get_vif_info(self._session, [ 598.359291] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 598.359291] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] for vif in network_info: [ 598.359291] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 598.359291] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] return self._sync_wrapper(fn, *args, **kwargs) [ 598.359291] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 598.359291] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] self.wait() [ 598.359291] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 598.359291] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] self[:] = self._gt.wait() [ 598.359291] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 598.359291] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] return self._exit_event.wait() [ 598.359291] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 598.359291] env[63345]: ERROR 
nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] result = hub.switch() [ 598.359643] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 598.359643] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] return self.greenlet.switch() [ 598.359643] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 598.359643] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] result = function(*args, **kwargs) [ 598.359643] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 598.359643] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] return func(*args, **kwargs) [ 598.359643] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 598.359643] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] raise e [ 598.359643] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 598.359643] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] nwinfo = self.network_api.allocate_for_instance( [ 598.359643] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 598.359643] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] created_port_ids = self._update_ports_for_instance( [ 598.359643] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 598.361444] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] with excutils.save_and_reraise_exception(): [ 598.361444] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 598.361444] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] self.force_reraise() [ 598.361444] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 598.361444] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] raise self.value [ 598.361444] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 598.361444] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] updated_port = self._update_port( [ 598.361444] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 598.361444] 
env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] _ensure_no_port_binding_failure(port) [ 598.361444] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 598.361444] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] raise exception.PortBindingFailed(port_id=port['id']) [ 598.361444] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] nova.exception.PortBindingFailed: Binding failed for port bb0b3f3b-ae46-452c-a6d5-2f2f581a8e08, please check neutron logs for more information. [ 598.361444] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] [ 598.361751] env[63345]: INFO nova.compute.manager [None req-994a51d0-00b8-426a-996a-32235561cbcb tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] Terminating instance [ 598.653150] env[63345]: DEBUG nova.network.neutron [req-15b6c20a-cf72-4ed1-af4a-49a8a1dad98a req-eda7524a-d3c6-4162-a175-55ec65b3b77f service nova] [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 598.728391] env[63345]: DEBUG nova.network.neutron [req-15b6c20a-cf72-4ed1-af4a-49a8a1dad98a req-eda7524a-d3c6-4162-a175-55ec65b3b77f service nova] [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 598.770225] env[63345]: DEBUG nova.network.neutron [None req-55f4b490-cb76-4231-a948-849d24288fed tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 598.824400] env[63345]: DEBUG nova.network.neutron [None req-55f4b490-cb76-4231-a948-849d24288fed tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 598.865619] env[63345]: DEBUG oslo_concurrency.lockutils [None req-994a51d0-00b8-426a-996a-32235561cbcb tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Acquiring lock "refresh_cache-1a54db9b-0482-4038-a505-46447f0c33ef" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 599.232198] env[63345]: DEBUG oslo_concurrency.lockutils [req-15b6c20a-cf72-4ed1-af4a-49a8a1dad98a req-eda7524a-d3c6-4162-a175-55ec65b3b77f service nova] Releasing lock "refresh_cache-1a54db9b-0482-4038-a505-46447f0c33ef" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 599.232614] env[63345]: DEBUG oslo_concurrency.lockutils [None req-994a51d0-00b8-426a-996a-32235561cbcb tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Acquired lock "refresh_cache-1a54db9b-0482-4038-a505-46447f0c33ef" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 599.232993] env[63345]: DEBUG nova.network.neutron [None req-994a51d0-00b8-426a-996a-32235561cbcb tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 599.327329] env[63345]: DEBUG oslo_concurrency.lockutils [None req-55f4b490-cb76-4231-a948-849d24288fed tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Releasing lock "refresh_cache-6e8ef6b9-4684-4685-949a-2e2868aa3fb7" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 599.327559] env[63345]: DEBUG nova.compute.manager [None req-55f4b490-cb76-4231-a948-849d24288fed tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=63345) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 599.327722] env[63345]: DEBUG nova.compute.manager [None req-55f4b490-cb76-4231-a948-849d24288fed tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 599.327887] env[63345]: DEBUG nova.network.neutron [None req-55f4b490-cb76-4231-a948-849d24288fed tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 599.354344] env[63345]: DEBUG nova.network.neutron [None req-55f4b490-cb76-4231-a948-849d24288fed tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 599.737143] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ed2090b-6f45-4a06-a8f2-9918f84232cd {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.748557] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-327c89cc-740e-4601-be5f-dc691b39794a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.781224] env[63345]: DEBUG nova.network.neutron [None req-994a51d0-00b8-426a-996a-32235561cbcb tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 599.783610] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e8938f2-0a4c-43f2-b5d1-7bb2bab892ba {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.792278] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89b2e71a-0843-4bcf-b09a-26403f84a719 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.808213] env[63345]: DEBUG nova.compute.provider_tree [None req-230142a3-84f1-43a5-9eb9-4bad4e3f22a1 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 599.857914] env[63345]: DEBUG nova.network.neutron [None req-55f4b490-cb76-4231-a948-849d24288fed tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 599.888173] env[63345]: DEBUG nova.network.neutron [None req-994a51d0-00b8-426a-996a-32235561cbcb tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 600.179348] env[63345]: DEBUG nova.compute.manager [req-d0ba0f8f-18e4-493d-a2a6-2614f8033ab5 req-89b6fb26-c70f-4777-9694-b0bcebf1d768 service nova] [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] Received event network-vif-deleted-bb0b3f3b-ae46-452c-a6d5-2f2f581a8e08 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 600.313516] env[63345]: DEBUG nova.scheduler.client.report [None req-230142a3-84f1-43a5-9eb9-4bad4e3f22a1 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 600.362038] env[63345]: INFO nova.compute.manager [None req-55f4b490-cb76-4231-a948-849d24288fed tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] [instance: 6e8ef6b9-4684-4685-949a-2e2868aa3fb7] Took 1.03 seconds to deallocate network for instance. 
[ 600.390731] env[63345]: DEBUG oslo_concurrency.lockutils [None req-994a51d0-00b8-426a-996a-32235561cbcb tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Releasing lock "refresh_cache-1a54db9b-0482-4038-a505-46447f0c33ef" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 600.391860] env[63345]: DEBUG nova.compute.manager [None req-994a51d0-00b8-426a-996a-32235561cbcb tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] Start destroying the instance on the hypervisor. {{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 600.392551] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-994a51d0-00b8-426a-996a-32235561cbcb tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 600.393060] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b182645f-c88a-44be-9347-87943ee2caba {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.407886] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44843c0f-0f8c-4f24-828a-29c009c9380d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.432140] env[63345]: WARNING nova.virt.vmwareapi.vmops [None req-994a51d0-00b8-426a-996a-32235561cbcb tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1a54db9b-0482-4038-a505-46447f0c33ef could not be found. [ 600.432140] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-994a51d0-00b8-426a-996a-32235561cbcb tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 600.432338] env[63345]: INFO nova.compute.manager [None req-994a51d0-00b8-426a-996a-32235561cbcb tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] Took 0.04 seconds to destroy the instance on the hypervisor. [ 600.432576] env[63345]: DEBUG oslo.service.loopingcall [None req-994a51d0-00b8-426a-996a-32235561cbcb tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 600.432767] env[63345]: DEBUG nova.compute.manager [-] [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 600.432861] env[63345]: DEBUG nova.network.neutron [-] [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 600.449466] env[63345]: DEBUG nova.network.neutron [-] [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 600.820868] env[63345]: DEBUG oslo_concurrency.lockutils [None req-230142a3-84f1-43a5-9eb9-4bad4e3f22a1 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.593s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 600.822120] env[63345]: DEBUG nova.compute.manager [None req-230142a3-84f1-43a5-9eb9-4bad4e3f22a1 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] Start building networks asynchronously for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 600.824074] env[63345]: DEBUG oslo_concurrency.lockutils [None req-85aab4a0-1d2c-4cb7-b057-977c0a911cc2 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.806s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 600.829161] env[63345]: INFO nova.compute.claims [None req-85aab4a0-1d2c-4cb7-b057-977c0a911cc2 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 600.952588] env[63345]: DEBUG nova.network.neutron [-] [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 601.146664] env[63345]: DEBUG oslo_concurrency.lockutils [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquiring lock "85fb1ecd-4ca3-401d-a87a-131f0b275506" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 601.146996] env[63345]: DEBUG oslo_concurrency.lockutils [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Lock "85fb1ecd-4ca3-401d-a87a-131f0b275506" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 601.330449] env[63345]: DEBUG 
nova.compute.utils [None req-230142a3-84f1-43a5-9eb9-4bad4e3f22a1 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 601.332581] env[63345]: DEBUG nova.compute.manager [None req-230142a3-84f1-43a5-9eb9-4bad4e3f22a1 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] Allocating IP information in the background. {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 601.332801] env[63345]: DEBUG nova.network.neutron [None req-230142a3-84f1-43a5-9eb9-4bad4e3f22a1 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 601.395424] env[63345]: INFO nova.scheduler.client.report [None req-55f4b490-cb76-4231-a948-849d24288fed tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Deleted allocations for instance 6e8ef6b9-4684-4685-949a-2e2868aa3fb7 [ 601.459779] env[63345]: INFO nova.compute.manager [-] [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] Took 1.03 seconds to deallocate network for instance. [ 601.463767] env[63345]: DEBUG nova.compute.claims [None req-994a51d0-00b8-426a-996a-32235561cbcb tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] Aborting claim: {{(pid=63345) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 601.463969] env[63345]: DEBUG oslo_concurrency.lockutils [None req-994a51d0-00b8-426a-996a-32235561cbcb tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 601.547041] env[63345]: DEBUG nova.policy [None req-230142a3-84f1-43a5-9eb9-4bad4e3f22a1 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6055500166344214a404427722503338', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dfc1248fb5ee4f798b6c59154d4cf623', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 601.837792] env[63345]: DEBUG nova.compute.manager [None req-230142a3-84f1-43a5-9eb9-4bad4e3f22a1 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] Start building block device mappings for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 601.905415] env[63345]: DEBUG oslo_concurrency.lockutils [None req-55f4b490-cb76-4231-a948-849d24288fed tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Lock "6e8ef6b9-4684-4685-949a-2e2868aa3fb7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 67.854s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 602.050082] env[63345]: DEBUG nova.network.neutron [None req-230142a3-84f1-43a5-9eb9-4bad4e3f22a1 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] Successfully created port: 22d34ad5-6afb-4378-b16e-db51ac9d8c93 {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 602.316949] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-430b6a22-eb35-428c-bd4a-8659523a2eec {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.325334] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfa65915-7f07-46f3-a48a-95a48980872e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.966041] env[63345]: DEBUG nova.compute.manager [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: 28caa5f5-141a-4ef9-abb3-33a1973d99cf] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 602.966041] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef3c9a76-9951-4c3b-b3be-fb89b56d7b65 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.973645] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09ef7013-3a36-438f-a832-7c260dbdd639 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.986807] env[63345]: DEBUG nova.compute.provider_tree [None req-85aab4a0-1d2c-4cb7-b057-977c0a911cc2 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 603.467199] env[63345]: DEBUG nova.compute.manager [None req-230142a3-84f1-43a5-9eb9-4bad4e3f22a1 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] Start spawning the instance on the hypervisor. 
{{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 603.491110] env[63345]: DEBUG nova.scheduler.client.report [None req-85aab4a0-1d2c-4cb7-b057-977c0a911cc2 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 603.503017] env[63345]: DEBUG oslo_concurrency.lockutils [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 603.513119] env[63345]: DEBUG nova.compute.manager [req-856db18f-0920-4005-b542-a55a37972ea8 req-e3bed7ce-d7a1-4379-8832-6a939e0583dd service nova] [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] Received event network-changed-22d34ad5-6afb-4378-b16e-db51ac9d8c93 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 603.513319] env[63345]: DEBUG nova.compute.manager [req-856db18f-0920-4005-b542-a55a37972ea8 req-e3bed7ce-d7a1-4379-8832-6a939e0583dd service nova] [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] Refreshing instance network info cache due to event network-changed-22d34ad5-6afb-4378-b16e-db51ac9d8c93. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 603.513533] env[63345]: DEBUG oslo_concurrency.lockutils [req-856db18f-0920-4005-b542-a55a37972ea8 req-e3bed7ce-d7a1-4379-8832-6a939e0583dd service nova] Acquiring lock "refresh_cache-e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 603.513751] env[63345]: DEBUG oslo_concurrency.lockutils [req-856db18f-0920-4005-b542-a55a37972ea8 req-e3bed7ce-d7a1-4379-8832-6a939e0583dd service nova] Acquired lock "refresh_cache-e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 603.513832] env[63345]: DEBUG nova.network.neutron [req-856db18f-0920-4005-b542-a55a37972ea8 req-e3bed7ce-d7a1-4379-8832-6a939e0583dd service nova] [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] Refreshing network info cache for port 22d34ad5-6afb-4378-b16e-db51ac9d8c93 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 603.518449] env[63345]: DEBUG nova.virt.hardware [None req-230142a3-84f1-43a5-9eb9-4bad4e3f22a1 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 603.518674] env[63345]: DEBUG nova.virt.hardware [None req-230142a3-84f1-43a5-9eb9-4bad4e3f22a1 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 603.518837] env[63345]: DEBUG nova.virt.hardware [None req-230142a3-84f1-43a5-9eb9-4bad4e3f22a1 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 603.519064] env[63345]: DEBUG nova.virt.hardware [None req-230142a3-84f1-43a5-9eb9-4bad4e3f22a1 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 603.519964] env[63345]: DEBUG nova.virt.hardware [None req-230142a3-84f1-43a5-9eb9-4bad4e3f22a1 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 603.520230] env[63345]: DEBUG nova.virt.hardware [None req-230142a3-84f1-43a5-9eb9-4bad4e3f22a1 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, 
cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 603.520487] env[63345]: DEBUG nova.virt.hardware [None req-230142a3-84f1-43a5-9eb9-4bad4e3f22a1 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 603.520658] env[63345]: DEBUG nova.virt.hardware [None req-230142a3-84f1-43a5-9eb9-4bad4e3f22a1 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 603.520875] env[63345]: DEBUG nova.virt.hardware [None req-230142a3-84f1-43a5-9eb9-4bad4e3f22a1 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 603.521087] env[63345]: DEBUG nova.virt.hardware [None req-230142a3-84f1-43a5-9eb9-4bad4e3f22a1 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 603.521306] env[63345]: DEBUG nova.virt.hardware [None req-230142a3-84f1-43a5-9eb9-4bad4e3f22a1 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 603.522601] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ae77ec1-e05e-46aa-9172-61a9bf985f9e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.535401] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac8c42b6-32b3-49d7-b899-104509b2f1aa {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.605303] env[63345]: DEBUG nova.network.neutron [req-856db18f-0920-4005-b542-a55a37972ea8 req-e3bed7ce-d7a1-4379-8832-6a939e0583dd service nova] [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 603.688353] env[63345]: ERROR nova.compute.manager [None req-230142a3-84f1-43a5-9eb9-4bad4e3f22a1 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 22d34ad5-6afb-4378-b16e-db51ac9d8c93, please check neutron logs for more information. 
[ 603.688353] env[63345]: ERROR nova.compute.manager Traceback (most recent call last): [ 603.688353] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 603.688353] env[63345]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 603.688353] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 603.688353] env[63345]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 603.688353] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 603.688353] env[63345]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 603.688353] env[63345]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 603.688353] env[63345]: ERROR nova.compute.manager self.force_reraise() [ 603.688353] env[63345]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 603.688353] env[63345]: ERROR nova.compute.manager raise self.value [ 603.688353] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 603.688353] env[63345]: ERROR nova.compute.manager updated_port = self._update_port( [ 603.688353] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 603.688353] env[63345]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 603.688800] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 603.688800] env[63345]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 603.688800] env[63345]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 22d34ad5-6afb-4378-b16e-db51ac9d8c93, please check neutron logs for more information. 
[ 603.688800] env[63345]: ERROR nova.compute.manager [ 603.688800] env[63345]: Traceback (most recent call last): [ 603.688928] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 603.688928] env[63345]: listener.cb(fileno) [ 603.688928] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 603.688928] env[63345]: result = function(*args, **kwargs) [ 603.688928] env[63345]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 603.688928] env[63345]: return func(*args, **kwargs) [ 603.688928] env[63345]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 603.688928] env[63345]: raise e [ 603.689173] env[63345]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 603.689173] env[63345]: nwinfo = self.network_api.allocate_for_instance( [ 603.689173] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 603.689173] env[63345]: created_port_ids = self._update_ports_for_instance( [ 603.689173] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 603.689173] env[63345]: with excutils.save_and_reraise_exception(): [ 603.689173] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 603.689173] env[63345]: self.force_reraise() [ 603.689173] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 603.689173] env[63345]: raise self.value [ 603.689173] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 603.689173] env[63345]: updated_port = self._update_port( [ 603.689173] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 603.689173] env[63345]: _ensure_no_port_binding_failure(port) [ 603.689173] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 603.689173] env[63345]: raise exception.PortBindingFailed(port_id=port['id']) [ 603.689173] env[63345]: nova.exception.PortBindingFailed: Binding failed for port 22d34ad5-6afb-4378-b16e-db51ac9d8c93, please check neutron logs for more information. [ 603.689173] env[63345]: Removing descriptor: 16 [ 603.690035] env[63345]: ERROR nova.compute.manager [None req-230142a3-84f1-43a5-9eb9-4bad4e3f22a1 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 22d34ad5-6afb-4378-b16e-db51ac9d8c93, please check neutron logs for more information. 
[ 603.690035] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] Traceback (most recent call last): [ 603.690035] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 603.690035] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] yield resources [ 603.690035] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 603.690035] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] self.driver.spawn(context, instance, image_meta, [ 603.690035] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 603.690035] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 603.690035] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 603.690035] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] vm_ref = self.build_virtual_machine(instance, [ 603.690035] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 603.690369] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] vif_infos = vmwarevif.get_vif_info(self._session, [ 603.690369] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 603.690369] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] for vif in network_info: [ 603.690369] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 603.690369] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] return self._sync_wrapper(fn, *args, **kwargs) [ 603.690369] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 603.690369] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] self.wait() [ 603.690369] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 603.690369] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] self[:] = self._gt.wait() [ 603.690369] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 603.690369] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] return self._exit_event.wait() [ 603.690369] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 603.690369] env[63345]: ERROR 
nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] result = hub.switch() [ 603.690732] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 603.690732] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] return self.greenlet.switch() [ 603.690732] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 603.690732] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] result = function(*args, **kwargs) [ 603.690732] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 603.690732] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] return func(*args, **kwargs) [ 603.690732] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 603.690732] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] raise e [ 603.690732] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 603.690732] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] nwinfo = self.network_api.allocate_for_instance( [ 603.690732] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 603.690732] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] created_port_ids = self._update_ports_for_instance( [ 603.690732] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 603.691678] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] with excutils.save_and_reraise_exception(): [ 603.691678] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 603.691678] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] self.force_reraise() [ 603.691678] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 603.691678] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] raise self.value [ 603.691678] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 603.691678] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] updated_port = self._update_port( [ 603.691678] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 603.691678] 
env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] _ensure_no_port_binding_failure(port) [ 603.691678] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 603.691678] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] raise exception.PortBindingFailed(port_id=port['id']) [ 603.691678] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] nova.exception.PortBindingFailed: Binding failed for port 22d34ad5-6afb-4378-b16e-db51ac9d8c93, please check neutron logs for more information. [ 603.691678] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] [ 603.692052] env[63345]: INFO nova.compute.manager [None req-230142a3-84f1-43a5-9eb9-4bad4e3f22a1 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] Terminating instance [ 603.749679] env[63345]: DEBUG nova.network.neutron [req-856db18f-0920-4005-b542-a55a37972ea8 req-e3bed7ce-d7a1-4379-8832-6a939e0583dd service nova] [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 603.999655] env[63345]: DEBUG oslo_concurrency.lockutils [None req-85aab4a0-1d2c-4cb7-b057-977c0a911cc2 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.175s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 604.000234] env[63345]: DEBUG nova.compute.manager [None req-85aab4a0-1d2c-4cb7-b057-977c0a911cc2 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] Start building networks asynchronously for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 604.004084] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f951fde0-e7d4-4228-ab48-05cbc0ea0cd4 tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.349s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 604.007301] env[63345]: INFO nova.compute.claims [None req-f951fde0-e7d4-4228-ab48-05cbc0ea0cd4 tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 604.194613] env[63345]: DEBUG oslo_concurrency.lockutils [None req-230142a3-84f1-43a5-9eb9-4bad4e3f22a1 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Acquiring lock "refresh_cache-e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 604.254346] env[63345]: DEBUG oslo_concurrency.lockutils [req-856db18f-0920-4005-b542-a55a37972ea8 req-e3bed7ce-d7a1-4379-8832-6a939e0583dd service nova] Releasing lock "refresh_cache-e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 604.254346] env[63345]: DEBUG oslo_concurrency.lockutils [None req-230142a3-84f1-43a5-9eb9-4bad4e3f22a1 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Acquired lock "refresh_cache-e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 604.254346] env[63345]: DEBUG nova.network.neutron [None req-230142a3-84f1-43a5-9eb9-4bad4e3f22a1 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 604.512116] env[63345]: DEBUG nova.compute.utils [None req-85aab4a0-1d2c-4cb7-b057-977c0a911cc2 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 604.515783] env[63345]: DEBUG nova.compute.manager [None req-85aab4a0-1d2c-4cb7-b057-977c0a911cc2 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] Allocating IP information in the background. 
{{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 604.515953] env[63345]: DEBUG nova.network.neutron [None req-85aab4a0-1d2c-4cb7-b057-977c0a911cc2 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 604.615348] env[63345]: DEBUG nova.policy [None req-85aab4a0-1d2c-4cb7-b057-977c0a911cc2 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '27fc4f99c7f44b1ea421bd8f13de6e43', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '964cee117b3c4601b3afe82a8bb9c23e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 604.774259] env[63345]: DEBUG nova.network.neutron [None req-230142a3-84f1-43a5-9eb9-4bad4e3f22a1 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 604.860300] env[63345]: DEBUG nova.network.neutron [None req-230142a3-84f1-43a5-9eb9-4bad4e3f22a1 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 605.018975] env[63345]: DEBUG nova.compute.manager [None req-85aab4a0-1d2c-4cb7-b057-977c0a911cc2 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] Start building block device mappings for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 605.363211] env[63345]: DEBUG oslo_concurrency.lockutils [None req-230142a3-84f1-43a5-9eb9-4bad4e3f22a1 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Releasing lock "refresh_cache-e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 605.363648] env[63345]: DEBUG nova.compute.manager [None req-230142a3-84f1-43a5-9eb9-4bad4e3f22a1 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] Start destroying the instance on the hypervisor. 
{{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 605.364820] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-230142a3-84f1-43a5-9eb9-4bad4e3f22a1 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 605.364820] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f5b70140-3692-46a8-9670-ce956999a830 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.378527] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ea223f3-0eff-43f8-ac00-e5ad502fe004 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.410651] env[63345]: WARNING nova.virt.vmwareapi.vmops [None req-230142a3-84f1-43a5-9eb9-4bad4e3f22a1 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d could not be found. [ 605.410651] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-230142a3-84f1-43a5-9eb9-4bad4e3f22a1 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 605.410651] env[63345]: INFO nova.compute.manager [None req-230142a3-84f1-43a5-9eb9-4bad4e3f22a1 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] Took 0.05 seconds to destroy the instance on the hypervisor. [ 605.410651] env[63345]: DEBUG oslo.service.loopingcall [None req-230142a3-84f1-43a5-9eb9-4bad4e3f22a1 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 605.413173] env[63345]: DEBUG nova.compute.manager [-] [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 605.413272] env[63345]: DEBUG nova.network.neutron [-] [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 605.430087] env[63345]: DEBUG nova.network.neutron [-] [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 605.433987] env[63345]: DEBUG nova.network.neutron [None req-85aab4a0-1d2c-4cb7-b057-977c0a911cc2 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] Successfully created port: 5c1682d3-7aad-4da1-a273-71c9a99cd821 {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 605.578203] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59444a15-b290-4e03-a220-9829cec0eff1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.593475] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66139f76-6202-4095-9819-931a7df9e381 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.631491] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-324335b0-5569-423c-b360-0f423dfd9c74 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.639567] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4264367b-cf05-444e-ad39-81af30208345 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.655103] env[63345]: DEBUG nova.compute.provider_tree [None req-f951fde0-e7d4-4228-ab48-05cbc0ea0cd4 tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 605.748495] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Acquiring lock "3101726f-5b14-417e-bcf8-390ce1f9b467" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 605.748867] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Lock "3101726f-5b14-417e-bcf8-390ce1f9b467" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 605.773364] env[63345]: DEBUG nova.compute.manager [req-31ceeb8a-648f-4e62-a729-410e68dd6a30 req-c588e966-ae47-4f74-901f-f35d0d7c8426 service nova] [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] Received event network-vif-deleted-22d34ad5-6afb-4378-b16e-db51ac9d8c93 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 605.935583] env[63345]: DEBUG nova.network.neutron [-] [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 606.032689] 
env[63345]: DEBUG nova.compute.manager [None req-85aab4a0-1d2c-4cb7-b057-977c0a911cc2 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] Start spawning the instance on the hypervisor. {{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 606.065500] env[63345]: DEBUG nova.virt.hardware [None req-85aab4a0-1d2c-4cb7-b057-977c0a911cc2 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 606.065751] env[63345]: DEBUG nova.virt.hardware [None req-85aab4a0-1d2c-4cb7-b057-977c0a911cc2 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 606.065909] env[63345]: DEBUG nova.virt.hardware [None req-85aab4a0-1d2c-4cb7-b057-977c0a911cc2 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 606.066105] env[63345]: DEBUG nova.virt.hardware [None req-85aab4a0-1d2c-4cb7-b057-977c0a911cc2 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 606.066257] env[63345]: DEBUG nova.virt.hardware [None req-85aab4a0-1d2c-4cb7-b057-977c0a911cc2 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 606.066554] env[63345]: DEBUG nova.virt.hardware [None req-85aab4a0-1d2c-4cb7-b057-977c0a911cc2 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 606.067434] env[63345]: DEBUG nova.virt.hardware [None req-85aab4a0-1d2c-4cb7-b057-977c0a911cc2 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 606.067434] env[63345]: DEBUG nova.virt.hardware [None 
req-85aab4a0-1d2c-4cb7-b057-977c0a911cc2 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 606.067434] env[63345]: DEBUG nova.virt.hardware [None req-85aab4a0-1d2c-4cb7-b057-977c0a911cc2 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 606.067434] env[63345]: DEBUG nova.virt.hardware [None req-85aab4a0-1d2c-4cb7-b057-977c0a911cc2 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 606.067732] env[63345]: DEBUG nova.virt.hardware [None req-85aab4a0-1d2c-4cb7-b057-977c0a911cc2 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 606.068675] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-177b405b-d3a4-48d1-a04e-1cc1c2cf749f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.077673] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40b56902-bd32-4363-b547-b99699f0047e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.157588] env[63345]: DEBUG nova.scheduler.client.report [None req-f951fde0-e7d4-4228-ab48-05cbc0ea0cd4 tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 606.440202] env[63345]: INFO nova.compute.manager [-] [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] Took 1.03 seconds to deallocate network for instance. 
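
The nova.virt.hardware DEBUG records just above walk through the CPU-topology selection for the m1.nano flavor: take the flavor/image limits (here effectively unbounded at 65536:65536:65536), enumerate every sockets/cores/threads split of the vCPU count, then sort against the preferred topology. The following is a simplified, self-contained sketch of that enumeration step only, written for illustration; Nova's actual logic lives in nova/virt/hardware.py and also folds in preferred topologies and other constraints.

    def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
        """Yield (sockets, cores, threads) splits whose product equals vcpus.

        Simplified stand-in for the 'Build topologies ... / Got N possible
        topologies' steps in the log above.
        """
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            per_socket = vcpus // sockets
            for cores in range(1, min(per_socket, max_cores) + 1):
                if per_socket % cores:
                    continue
                threads = per_socket // cores
                if threads <= max_threads:
                    yield sockets, cores, threads

    # The m1.nano flavor above has 1 vCPU and limits 65536:65536:65536, so the
    # only candidate is (1, 1, 1), matching "Possible topologies
    # [VirtCPUTopology(cores=1,sockets=1,threads=1)]" in the log.
    print(list(possible_topologies(1, 65536, 65536, 65536)))

For a 1-vCPU flavor the enumeration is trivial, which is why the log reports exactly one possible topology before sorting.
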
[ 606.445902] env[63345]: DEBUG nova.compute.claims [None req-230142a3-84f1-43a5-9eb9-4bad4e3f22a1 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] Aborting claim: {{(pid=63345) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 606.446110] env[63345]: DEBUG oslo_concurrency.lockutils [None req-230142a3-84f1-43a5-9eb9-4bad4e3f22a1 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 606.671254] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f951fde0-e7d4-4228-ab48-05cbc0ea0cd4 tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.667s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 606.671776] env[63345]: DEBUG nova.compute.manager [None req-f951fde0-e7d4-4228-ab48-05cbc0ea0cd4 tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] Start building networks asynchronously for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 606.674652] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8a6c5bc0-caa6-4dbe-a700-0852231dd95b tempest-ServerExternalEventsTest-203609284 tempest-ServerExternalEventsTest-203609284-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 18.725s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 606.709627] env[63345]: DEBUG nova.compute.manager [req-09a4d87d-b2fd-427f-9d53-c112cba952b3 req-40aacd9a-5ce8-4555-9559-e668eab2d1ca service nova] [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] Received event network-changed-5c1682d3-7aad-4da1-a273-71c9a99cd821 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 606.709816] env[63345]: DEBUG nova.compute.manager [req-09a4d87d-b2fd-427f-9d53-c112cba952b3 req-40aacd9a-5ce8-4555-9559-e668eab2d1ca service nova] [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] Refreshing instance network info cache due to event network-changed-5c1682d3-7aad-4da1-a273-71c9a99cd821. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 606.710044] env[63345]: DEBUG oslo_concurrency.lockutils [req-09a4d87d-b2fd-427f-9d53-c112cba952b3 req-40aacd9a-5ce8-4555-9559-e668eab2d1ca service nova] Acquiring lock "refresh_cache-079cd9f1-4753-4298-9b06-c3b9925d2982" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 606.710235] env[63345]: DEBUG oslo_concurrency.lockutils [req-09a4d87d-b2fd-427f-9d53-c112cba952b3 req-40aacd9a-5ce8-4555-9559-e668eab2d1ca service nova] Acquired lock "refresh_cache-079cd9f1-4753-4298-9b06-c3b9925d2982" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 606.710407] env[63345]: DEBUG nova.network.neutron [req-09a4d87d-b2fd-427f-9d53-c112cba952b3 req-40aacd9a-5ce8-4555-9559-e668eab2d1ca service nova] [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] Refreshing network info cache for port 5c1682d3-7aad-4da1-a273-71c9a99cd821 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 606.969749] env[63345]: ERROR nova.compute.manager [None req-85aab4a0-1d2c-4cb7-b057-977c0a911cc2 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 5c1682d3-7aad-4da1-a273-71c9a99cd821, please check neutron logs for more information. [ 606.969749] env[63345]: ERROR nova.compute.manager Traceback (most recent call last): [ 606.969749] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 606.969749] env[63345]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 606.969749] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 606.969749] env[63345]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 606.969749] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 606.969749] env[63345]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 606.969749] env[63345]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 606.969749] env[63345]: ERROR nova.compute.manager self.force_reraise() [ 606.969749] env[63345]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 606.969749] env[63345]: ERROR nova.compute.manager raise self.value [ 606.969749] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 606.969749] env[63345]: ERROR nova.compute.manager updated_port = self._update_port( [ 606.969749] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 606.969749] env[63345]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 606.970225] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 606.970225] env[63345]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 606.970225] env[63345]: ERROR nova.compute.manager 
nova.exception.PortBindingFailed: Binding failed for port 5c1682d3-7aad-4da1-a273-71c9a99cd821, please check neutron logs for more information. [ 606.970225] env[63345]: ERROR nova.compute.manager [ 606.970225] env[63345]: Traceback (most recent call last): [ 606.970225] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 606.970225] env[63345]: listener.cb(fileno) [ 606.970225] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 606.970225] env[63345]: result = function(*args, **kwargs) [ 606.970225] env[63345]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 606.970225] env[63345]: return func(*args, **kwargs) [ 606.970225] env[63345]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 606.970225] env[63345]: raise e [ 606.970225] env[63345]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 606.970225] env[63345]: nwinfo = self.network_api.allocate_for_instance( [ 606.970225] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 606.970225] env[63345]: created_port_ids = self._update_ports_for_instance( [ 606.970225] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 606.970225] env[63345]: with excutils.save_and_reraise_exception(): [ 606.970225] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 606.970225] env[63345]: self.force_reraise() [ 606.970225] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 606.970225] env[63345]: raise self.value [ 606.970225] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 606.970225] env[63345]: updated_port = self._update_port( [ 606.970225] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 606.970225] env[63345]: _ensure_no_port_binding_failure(port) [ 606.970225] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 606.970225] env[63345]: raise exception.PortBindingFailed(port_id=port['id']) [ 606.970987] env[63345]: nova.exception.PortBindingFailed: Binding failed for port 5c1682d3-7aad-4da1-a273-71c9a99cd821, please check neutron logs for more information. [ 606.970987] env[63345]: Removing descriptor: 15 [ 606.970987] env[63345]: ERROR nova.compute.manager [None req-85aab4a0-1d2c-4cb7-b057-977c0a911cc2 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 5c1682d3-7aad-4da1-a273-71c9a99cd821, please check neutron logs for more information. 
[ 606.970987] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] Traceback (most recent call last): [ 606.970987] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 606.970987] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] yield resources [ 606.970987] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 606.970987] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] self.driver.spawn(context, instance, image_meta, [ 606.970987] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 606.970987] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] self._vmops.spawn(context, instance, image_meta, injected_files, [ 606.970987] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 606.970987] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] vm_ref = self.build_virtual_machine(instance, [ 606.971307] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 606.971307] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] vif_infos = vmwarevif.get_vif_info(self._session, [ 606.971307] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 606.971307] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] for vif in network_info: [ 606.971307] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 606.971307] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] return self._sync_wrapper(fn, *args, **kwargs) [ 606.971307] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 606.971307] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] self.wait() [ 606.971307] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 606.971307] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] self[:] = self._gt.wait() [ 606.971307] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 606.971307] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] return self._exit_event.wait() [ 606.971307] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 606.971666] env[63345]: ERROR 
nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] result = hub.switch() [ 606.971666] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 606.971666] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] return self.greenlet.switch() [ 606.971666] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 606.971666] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] result = function(*args, **kwargs) [ 606.971666] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 606.971666] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] return func(*args, **kwargs) [ 606.971666] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 606.971666] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] raise e [ 606.971666] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 606.971666] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] nwinfo = self.network_api.allocate_for_instance( [ 606.971666] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 606.971666] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] created_port_ids = self._update_ports_for_instance( [ 606.972020] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 606.972020] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] with excutils.save_and_reraise_exception(): [ 606.972020] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 606.972020] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] self.force_reraise() [ 606.972020] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 606.972020] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] raise self.value [ 606.972020] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 606.972020] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] updated_port = self._update_port( [ 606.972020] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 606.972020] 
env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] _ensure_no_port_binding_failure(port) [ 606.972020] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 606.972020] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] raise exception.PortBindingFailed(port_id=port['id']) [ 606.972401] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] nova.exception.PortBindingFailed: Binding failed for port 5c1682d3-7aad-4da1-a273-71c9a99cd821, please check neutron logs for more information. [ 606.972401] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] [ 606.972401] env[63345]: INFO nova.compute.manager [None req-85aab4a0-1d2c-4cb7-b057-977c0a911cc2 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] Terminating instance [ 607.180271] env[63345]: DEBUG nova.compute.utils [None req-f951fde0-e7d4-4228-ab48-05cbc0ea0cd4 tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 607.184393] env[63345]: DEBUG nova.compute.manager [None req-f951fde0-e7d4-4228-ab48-05cbc0ea0cd4 tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] Allocating IP information in the background. {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 607.184569] env[63345]: DEBUG nova.network.neutron [None req-f951fde0-e7d4-4228-ab48-05cbc0ea0cd4 tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 607.242192] env[63345]: DEBUG nova.network.neutron [req-09a4d87d-b2fd-427f-9d53-c112cba952b3 req-40aacd9a-5ce8-4555-9559-e668eab2d1ca service nova] [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 607.251759] env[63345]: DEBUG nova.policy [None req-f951fde0-e7d4-4228-ab48-05cbc0ea0cd4 tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd19438fc9e0f4b208dcbabb9faf1f690', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4919485c7e184230b38e703f7ce8a047', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 607.342870] env[63345]: DEBUG nova.network.neutron [req-09a4d87d-b2fd-427f-9d53-c112cba952b3 req-40aacd9a-5ce8-4555-9559-e668eab2d1ca service nova] [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 607.476878] env[63345]: DEBUG oslo_concurrency.lockutils [None req-85aab4a0-1d2c-4cb7-b057-977c0a911cc2 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquiring lock "refresh_cache-079cd9f1-4753-4298-9b06-c3b9925d2982" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 607.638921] env[63345]: DEBUG nova.network.neutron [None req-f951fde0-e7d4-4228-ab48-05cbc0ea0cd4 tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] Successfully created port: 908a9a32-7233-42bf-a87f-5fe81165e6ac {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 607.690031] env[63345]: DEBUG nova.compute.manager [None req-f951fde0-e7d4-4228-ab48-05cbc0ea0cd4 tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] Start building block device mappings for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 607.695114] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d028d931-3649-4c6a-8a7e-def7970c74ad {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.707788] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfc1ea87-6b89-480b-922f-971fc2b2c73e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.738399] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24b0790d-9bb7-460e-bba7-454eef402d8c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.747667] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34395f7b-d237-4019-832e-d46e4d5c04f5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.761040] env[63345]: DEBUG nova.compute.provider_tree [None req-8a6c5bc0-caa6-4dbe-a700-0852231dd95b tempest-ServerExternalEventsTest-203609284 tempest-ServerExternalEventsTest-203609284-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 607.846089] env[63345]: DEBUG oslo_concurrency.lockutils [req-09a4d87d-b2fd-427f-9d53-c112cba952b3 req-40aacd9a-5ce8-4555-9559-e668eab2d1ca service nova] Releasing lock "refresh_cache-079cd9f1-4753-4298-9b06-c3b9925d2982" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 607.846528] env[63345]: DEBUG oslo_concurrency.lockutils [None req-85aab4a0-1d2c-4cb7-b057-977c0a911cc2 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquired lock "refresh_cache-079cd9f1-4753-4298-9b06-c3b9925d2982" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 607.846701] env[63345]: DEBUG nova.network.neutron [None req-85aab4a0-1d2c-4cb7-b057-977c0a911cc2 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 607.957658] env[63345]: DEBUG nova.network.neutron [None req-f951fde0-e7d4-4228-ab48-05cbc0ea0cd4 tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] Successfully created port: e195d32a-f1c0-4104-a139-8093c170ed1d {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 608.264388] env[63345]: DEBUG nova.scheduler.client.report [None req-8a6c5bc0-caa6-4dbe-a700-0852231dd95b tempest-ServerExternalEventsTest-203609284 tempest-ServerExternalEventsTest-203609284-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 
'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 608.296396] env[63345]: DEBUG nova.network.neutron [None req-f951fde0-e7d4-4228-ab48-05cbc0ea0cd4 tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] Successfully created port: 3b2174bc-f08e-4ffe-b1e4-1c6105816d1a {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 608.364102] env[63345]: DEBUG nova.network.neutron [None req-85aab4a0-1d2c-4cb7-b057-977c0a911cc2 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 608.445645] env[63345]: DEBUG nova.network.neutron [None req-85aab4a0-1d2c-4cb7-b057-977c0a911cc2 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 608.701967] env[63345]: DEBUG nova.compute.manager [None req-f951fde0-e7d4-4228-ab48-05cbc0ea0cd4 tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] Start spawning the instance on the hypervisor. {{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 608.730453] env[63345]: DEBUG nova.virt.hardware [None req-f951fde0-e7d4-4228-ab48-05cbc0ea0cd4 tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 608.730772] env[63345]: DEBUG nova.virt.hardware [None req-f951fde0-e7d4-4228-ab48-05cbc0ea0cd4 tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 608.730992] env[63345]: DEBUG nova.virt.hardware [None req-f951fde0-e7d4-4228-ab48-05cbc0ea0cd4 tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 608.731501] env[63345]: DEBUG nova.virt.hardware [None req-f951fde0-e7d4-4228-ab48-05cbc0ea0cd4 
tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 608.731709] env[63345]: DEBUG nova.virt.hardware [None req-f951fde0-e7d4-4228-ab48-05cbc0ea0cd4 tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 608.732018] env[63345]: DEBUG nova.virt.hardware [None req-f951fde0-e7d4-4228-ab48-05cbc0ea0cd4 tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 608.732283] env[63345]: DEBUG nova.virt.hardware [None req-f951fde0-e7d4-4228-ab48-05cbc0ea0cd4 tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 608.732457] env[63345]: DEBUG nova.virt.hardware [None req-f951fde0-e7d4-4228-ab48-05cbc0ea0cd4 tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 608.732629] env[63345]: DEBUG nova.virt.hardware [None req-f951fde0-e7d4-4228-ab48-05cbc0ea0cd4 tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 608.732793] env[63345]: DEBUG nova.virt.hardware [None req-f951fde0-e7d4-4228-ab48-05cbc0ea0cd4 tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 608.732967] env[63345]: DEBUG nova.virt.hardware [None req-f951fde0-e7d4-4228-ab48-05cbc0ea0cd4 tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 608.733948] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0646b21-d4e6-4903-a8e4-6b8586ad8ff7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.743124] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c5c5313-a46d-45a9-a0d1-3e54b49214b9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.769864] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8a6c5bc0-caa6-4dbe-a700-0852231dd95b tempest-ServerExternalEventsTest-203609284 tempest-ServerExternalEventsTest-203609284-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.095s {{(pid=63345) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 608.770499] env[63345]: ERROR nova.compute.manager [None req-8a6c5bc0-caa6-4dbe-a700-0852231dd95b tempest-ServerExternalEventsTest-203609284 tempest-ServerExternalEventsTest-203609284-project-member] [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 7c71cffb-6a8f-43d7-b6d9-bd7e80e2973b, please check neutron logs for more information. [ 608.770499] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] Traceback (most recent call last): [ 608.770499] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 608.770499] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] self.driver.spawn(context, instance, image_meta, [ 608.770499] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 608.770499] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] self._vmops.spawn(context, instance, image_meta, injected_files, [ 608.770499] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 608.770499] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] vm_ref = self.build_virtual_machine(instance, [ 608.770499] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 608.770499] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] vif_infos = vmwarevif.get_vif_info(self._session, [ 608.770499] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 608.770786] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] for vif in network_info: [ 608.770786] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 608.770786] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] return self._sync_wrapper(fn, *args, **kwargs) [ 608.770786] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 608.770786] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] self.wait() [ 608.770786] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 608.770786] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] self[:] = self._gt.wait() [ 608.770786] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 608.770786] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] return self._exit_event.wait() [ 608.770786] 
env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 608.770786] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] result = hub.switch() [ 608.770786] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 608.770786] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] return self.greenlet.switch() [ 608.771081] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 608.771081] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] result = function(*args, **kwargs) [ 608.771081] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 608.771081] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] return func(*args, **kwargs) [ 608.771081] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 608.771081] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] raise e [ 608.771081] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 608.771081] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] nwinfo = self.network_api.allocate_for_instance( [ 608.771081] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 608.771081] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] created_port_ids = self._update_ports_for_instance( [ 608.771081] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 608.771081] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] with excutils.save_and_reraise_exception(): [ 608.771081] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 608.771360] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] self.force_reraise() [ 608.771360] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 608.771360] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] raise self.value [ 608.771360] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 608.771360] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] updated_port 
= self._update_port( [ 608.771360] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 608.771360] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] _ensure_no_port_binding_failure(port) [ 608.771360] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 608.771360] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] raise exception.PortBindingFailed(port_id=port['id']) [ 608.771360] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] nova.exception.PortBindingFailed: Binding failed for port 7c71cffb-6a8f-43d7-b6d9-bd7e80e2973b, please check neutron logs for more information. [ 608.771360] env[63345]: ERROR nova.compute.manager [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] [ 608.771619] env[63345]: DEBUG nova.compute.utils [None req-8a6c5bc0-caa6-4dbe-a700-0852231dd95b tempest-ServerExternalEventsTest-203609284 tempest-ServerExternalEventsTest-203609284-project-member] [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] Binding failed for port 7c71cffb-6a8f-43d7-b6d9-bd7e80e2973b, please check neutron logs for more information. {{(pid=63345) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 608.772843] env[63345]: DEBUG nova.compute.manager [None req-8a6c5bc0-caa6-4dbe-a700-0852231dd95b tempest-ServerExternalEventsTest-203609284 tempest-ServerExternalEventsTest-203609284-project-member] [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] Build of instance 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54 was re-scheduled: Binding failed for port 7c71cffb-6a8f-43d7-b6d9-bd7e80e2973b, please check neutron logs for more information. 
{{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 608.773447] env[63345]: DEBUG nova.compute.manager [None req-8a6c5bc0-caa6-4dbe-a700-0852231dd95b tempest-ServerExternalEventsTest-203609284 tempest-ServerExternalEventsTest-203609284-project-member] [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] Unplugging VIFs for instance {{(pid=63345) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 608.773752] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8a6c5bc0-caa6-4dbe-a700-0852231dd95b tempest-ServerExternalEventsTest-203609284 tempest-ServerExternalEventsTest-203609284-project-member] Acquiring lock "refresh_cache-77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 608.773967] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8a6c5bc0-caa6-4dbe-a700-0852231dd95b tempest-ServerExternalEventsTest-203609284 tempest-ServerExternalEventsTest-203609284-project-member] Acquired lock "refresh_cache-77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 608.774146] env[63345]: DEBUG nova.network.neutron [None req-8a6c5bc0-caa6-4dbe-a700-0852231dd95b tempest-ServerExternalEventsTest-203609284 tempest-ServerExternalEventsTest-203609284-project-member] [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 608.775098] env[63345]: DEBUG oslo_concurrency.lockutils [None req-28a9f8ee-561e-42c1-a81b-2f1cf60def7e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.682s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 608.776523] env[63345]: INFO nova.compute.claims [None req-28a9f8ee-561e-42c1-a81b-2f1cf60def7e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 608.951067] env[63345]: DEBUG oslo_concurrency.lockutils [None req-85aab4a0-1d2c-4cb7-b057-977c0a911cc2 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Releasing lock "refresh_cache-079cd9f1-4753-4298-9b06-c3b9925d2982" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 608.951505] env[63345]: DEBUG nova.compute.manager [None req-85aab4a0-1d2c-4cb7-b057-977c0a911cc2 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] Start destroying the instance on the hypervisor. 
{{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 608.951715] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-85aab4a0-1d2c-4cb7-b057-977c0a911cc2 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 608.952075] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-96cf301e-9e10-461c-9ab8-1ed734ce6af5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.956655] env[63345]: DEBUG nova.compute.manager [req-e2fee949-ac4d-49ea-8a50-df42f6c9a984 req-e6eb69d0-ce59-44b7-9f52-a6aca6edcb48 service nova] [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] Received event network-vif-deleted-5c1682d3-7aad-4da1-a273-71c9a99cd821 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 608.964092] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b1871b2-5834-4aea-b4ec-64e793fb7bab {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.988136] env[63345]: WARNING nova.virt.vmwareapi.vmops [None req-85aab4a0-1d2c-4cb7-b057-977c0a911cc2 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 079cd9f1-4753-4298-9b06-c3b9925d2982 could not be found. [ 608.988396] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-85aab4a0-1d2c-4cb7-b057-977c0a911cc2 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 608.988581] env[63345]: INFO nova.compute.manager [None req-85aab4a0-1d2c-4cb7-b057-977c0a911cc2 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] Took 0.04 seconds to destroy the instance on the hypervisor. [ 608.988819] env[63345]: DEBUG oslo.service.loopingcall [None req-85aab4a0-1d2c-4cb7-b057-977c0a911cc2 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 608.989048] env[63345]: DEBUG nova.compute.manager [-] [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 608.989161] env[63345]: DEBUG nova.network.neutron [-] [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 609.004948] env[63345]: DEBUG nova.network.neutron [-] [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 609.299856] env[63345]: DEBUG nova.network.neutron [None req-8a6c5bc0-caa6-4dbe-a700-0852231dd95b tempest-ServerExternalEventsTest-203609284 tempest-ServerExternalEventsTest-203609284-project-member] [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 609.346370] env[63345]: DEBUG nova.network.neutron [None req-8a6c5bc0-caa6-4dbe-a700-0852231dd95b tempest-ServerExternalEventsTest-203609284 tempest-ServerExternalEventsTest-203609284-project-member] [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 609.507417] env[63345]: DEBUG nova.network.neutron [-] [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 609.617088] env[63345]: ERROR nova.compute.manager [None req-f951fde0-e7d4-4228-ab48-05cbc0ea0cd4 tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 908a9a32-7233-42bf-a87f-5fe81165e6ac, please check neutron logs for more information. [ 609.617088] env[63345]: ERROR nova.compute.manager Traceback (most recent call last): [ 609.617088] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 609.617088] env[63345]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 609.617088] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 609.617088] env[63345]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 609.617088] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 609.617088] env[63345]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 609.617088] env[63345]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 609.617088] env[63345]: ERROR nova.compute.manager self.force_reraise() [ 609.617088] env[63345]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 609.617088] env[63345]: ERROR nova.compute.manager raise self.value [ 609.617088] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 609.617088] env[63345]: ERROR nova.compute.manager updated_port = self._update_port( [ 609.617088] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 609.617088] env[63345]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 609.617499] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 609.617499] env[63345]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 609.617499] env[63345]: ERROR nova.compute.manager 
nova.exception.PortBindingFailed: Binding failed for port 908a9a32-7233-42bf-a87f-5fe81165e6ac, please check neutron logs for more information. [ 609.617499] env[63345]: ERROR nova.compute.manager [ 609.617499] env[63345]: Traceback (most recent call last): [ 609.617499] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 609.617499] env[63345]: listener.cb(fileno) [ 609.617499] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 609.617499] env[63345]: result = function(*args, **kwargs) [ 609.617499] env[63345]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 609.617499] env[63345]: return func(*args, **kwargs) [ 609.617499] env[63345]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 609.617499] env[63345]: raise e [ 609.617499] env[63345]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 609.617499] env[63345]: nwinfo = self.network_api.allocate_for_instance( [ 609.617499] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 609.617499] env[63345]: created_port_ids = self._update_ports_for_instance( [ 609.617499] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 609.617499] env[63345]: with excutils.save_and_reraise_exception(): [ 609.617499] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 609.617499] env[63345]: self.force_reraise() [ 609.617499] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 609.617499] env[63345]: raise self.value [ 609.617499] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 609.617499] env[63345]: updated_port = self._update_port( [ 609.617499] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 609.617499] env[63345]: _ensure_no_port_binding_failure(port) [ 609.617499] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 609.617499] env[63345]: raise exception.PortBindingFailed(port_id=port['id']) [ 609.618236] env[63345]: nova.exception.PortBindingFailed: Binding failed for port 908a9a32-7233-42bf-a87f-5fe81165e6ac, please check neutron logs for more information. [ 609.618236] env[63345]: Removing descriptor: 15 [ 609.618236] env[63345]: ERROR nova.compute.manager [None req-f951fde0-e7d4-4228-ab48-05cbc0ea0cd4 tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 908a9a32-7233-42bf-a87f-5fe81165e6ac, please check neutron logs for more information. 
[ 609.618236] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] Traceback (most recent call last): [ 609.618236] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 609.618236] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] yield resources [ 609.618236] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 609.618236] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] self.driver.spawn(context, instance, image_meta, [ 609.618236] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 609.618236] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 609.618236] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 609.618236] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] vm_ref = self.build_virtual_machine(instance, [ 609.618605] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 609.618605] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] vif_infos = vmwarevif.get_vif_info(self._session, [ 609.618605] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 609.618605] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] for vif in network_info: [ 609.618605] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 609.618605] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] return self._sync_wrapper(fn, *args, **kwargs) [ 609.618605] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 609.618605] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] self.wait() [ 609.618605] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 609.618605] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] self[:] = self._gt.wait() [ 609.618605] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 609.618605] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] return self._exit_event.wait() [ 609.618605] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 609.618938] env[63345]: ERROR 
nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] result = hub.switch() [ 609.618938] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 609.618938] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] return self.greenlet.switch() [ 609.618938] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 609.618938] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] result = function(*args, **kwargs) [ 609.618938] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 609.618938] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] return func(*args, **kwargs) [ 609.618938] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 609.618938] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] raise e [ 609.618938] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 609.618938] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] nwinfo = self.network_api.allocate_for_instance( [ 609.618938] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 609.618938] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] created_port_ids = self._update_ports_for_instance( [ 609.619296] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 609.619296] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] with excutils.save_and_reraise_exception(): [ 609.619296] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 609.619296] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] self.force_reraise() [ 609.619296] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 609.619296] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] raise self.value [ 609.619296] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 609.619296] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] updated_port = self._update_port( [ 609.619296] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 609.619296] 
env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] _ensure_no_port_binding_failure(port) [ 609.619296] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 609.619296] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] raise exception.PortBindingFailed(port_id=port['id']) [ 609.619600] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] nova.exception.PortBindingFailed: Binding failed for port 908a9a32-7233-42bf-a87f-5fe81165e6ac, please check neutron logs for more information. [ 609.619600] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] [ 609.619600] env[63345]: INFO nova.compute.manager [None req-f951fde0-e7d4-4228-ab48-05cbc0ea0cd4 tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] Terminating instance [ 609.851621] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8a6c5bc0-caa6-4dbe-a700-0852231dd95b tempest-ServerExternalEventsTest-203609284 tempest-ServerExternalEventsTest-203609284-project-member] Releasing lock "refresh_cache-77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 609.851621] env[63345]: DEBUG nova.compute.manager [None req-8a6c5bc0-caa6-4dbe-a700-0852231dd95b tempest-ServerExternalEventsTest-203609284 tempest-ServerExternalEventsTest-203609284-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=63345) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 609.851621] env[63345]: DEBUG nova.compute.manager [None req-8a6c5bc0-caa6-4dbe-a700-0852231dd95b tempest-ServerExternalEventsTest-203609284 tempest-ServerExternalEventsTest-203609284-project-member] [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 609.851621] env[63345]: DEBUG nova.network.neutron [None req-8a6c5bc0-caa6-4dbe-a700-0852231dd95b tempest-ServerExternalEventsTest-203609284 tempest-ServerExternalEventsTest-203609284-project-member] [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 609.866180] env[63345]: DEBUG nova.network.neutron [None req-8a6c5bc0-caa6-4dbe-a700-0852231dd95b tempest-ServerExternalEventsTest-203609284 tempest-ServerExternalEventsTest-203609284-project-member] [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 610.014815] env[63345]: INFO nova.compute.manager [-] [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] Took 1.02 seconds to deallocate network for instance. 
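The PortBindingFailed tracebacks above all follow one pattern: while updating ports for the instance, a helper named _ensure_no_port_binding_failure raises nova.exception.PortBindingFailed for the offending port id, and the surrounding code uses oslo_utils.excutils.save_and_reraise_exception() so cleanup can run without losing the original exception. The short sketch below reproduces that pattern outside of Nova, under stated assumptions: the PortBindingFailed class, the 'binding:vif_type' == 'binding_failed' trigger, and the cleanup print are simplified stand-ins for illustration, not Nova's actual implementation.

    from oslo_utils import excutils


    class PortBindingFailed(Exception):
        """Stand-in for nova.exception.PortBindingFailed (assumed shape)."""

        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs "
                "for more information." % port_id)


    def _ensure_no_port_binding_failure(port):
        # The traceback shows this helper raising PortBindingFailed with the
        # port id; the exact trigger is assumed here to be the
        # 'binding:vif_type' attribute Neutron sets to 'binding_failed'.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])


    def update_ports(ports):
        for port in ports:
            try:
                _ensure_no_port_binding_failure(port)
            except Exception:
                # Same shape as the neutron.py frame in the traceback: run
                # cleanup, then re-raise the original exception unchanged.
                with excutils.save_and_reraise_exception():
                    print("rolling back ports created so far")


    try:
        update_ports([{'id': '908a9a32-7233-42bf-a87f-5fe81165e6ac',
                       'binding:vif_type': 'binding_failed'}])
    except PortBindingFailed as exc:
        print(exc)  # same message the compute manager logs above

The point of save_and_reraise_exception() here is that any cleanup done in the with-block cannot mask the original failure, which is why the log shows force_reraise() re-raising the very PortBindingFailed that started the rollback.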
[ 610.018132] env[63345]: DEBUG nova.compute.claims [None req-85aab4a0-1d2c-4cb7-b057-977c0a911cc2 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] Aborting claim: {{(pid=63345) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 610.018321] env[63345]: DEBUG oslo_concurrency.lockutils [None req-85aab4a0-1d2c-4cb7-b057-977c0a911cc2 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 610.129125] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f951fde0-e7d4-4228-ab48-05cbc0ea0cd4 tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Acquiring lock "refresh_cache-2889c4d1-ac1b-404d-a4f7-2b908557348d" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 610.129337] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f951fde0-e7d4-4228-ab48-05cbc0ea0cd4 tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Acquired lock "refresh_cache-2889c4d1-ac1b-404d-a4f7-2b908557348d" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 610.129515] env[63345]: DEBUG nova.network.neutron [None req-f951fde0-e7d4-4228-ab48-05cbc0ea0cd4 tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 610.215015] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb60a1e2-687c-4c5f-a209-ad246a297794 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.226765] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ba6a438-c356-49d9-ba7a-babe2e9a9759 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.263016] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc6502bb-5a6c-4de1-89ca-244acefec6e7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.268341] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a037c67-1a9a-4dc0-ac4c-046daf513c37 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.282475] env[63345]: DEBUG nova.compute.provider_tree [None req-28a9f8ee-561e-42c1-a81b-2f1cf60def7e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 610.368759] env[63345]: DEBUG nova.network.neutron [None req-8a6c5bc0-caa6-4dbe-a700-0852231dd95b tempest-ServerExternalEventsTest-203609284 tempest-ServerExternalEventsTest-203609284-project-member] [instance: 
77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 610.700941] env[63345]: DEBUG nova.network.neutron [None req-f951fde0-e7d4-4228-ab48-05cbc0ea0cd4 tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 610.785625] env[63345]: DEBUG nova.scheduler.client.report [None req-28a9f8ee-561e-42c1-a81b-2f1cf60def7e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 610.848221] env[63345]: DEBUG nova.network.neutron [None req-f951fde0-e7d4-4228-ab48-05cbc0ea0cd4 tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 610.871676] env[63345]: INFO nova.compute.manager [None req-8a6c5bc0-caa6-4dbe-a700-0852231dd95b tempest-ServerExternalEventsTest-203609284 tempest-ServerExternalEventsTest-203609284-project-member] [instance: 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54] Took 1.02 seconds to deallocate network for instance. [ 610.979618] env[63345]: DEBUG nova.compute.manager [req-d1e33f55-53e7-408c-a4b7-ffc963855763 req-6cfa0426-71c4-4713-9738-9f2c891880be service nova] [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] Received event network-changed-908a9a32-7233-42bf-a87f-5fe81165e6ac {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 610.979852] env[63345]: DEBUG nova.compute.manager [req-d1e33f55-53e7-408c-a4b7-ffc963855763 req-6cfa0426-71c4-4713-9738-9f2c891880be service nova] [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] Refreshing instance network info cache due to event network-changed-908a9a32-7233-42bf-a87f-5fe81165e6ac. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 610.980137] env[63345]: DEBUG oslo_concurrency.lockutils [req-d1e33f55-53e7-408c-a4b7-ffc963855763 req-6cfa0426-71c4-4713-9738-9f2c891880be service nova] Acquiring lock "refresh_cache-2889c4d1-ac1b-404d-a4f7-2b908557348d" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 611.294704] env[63345]: DEBUG oslo_concurrency.lockutils [None req-28a9f8ee-561e-42c1-a81b-2f1cf60def7e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.519s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 611.295196] env[63345]: DEBUG nova.compute.manager [None req-28a9f8ee-561e-42c1-a81b-2f1cf60def7e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] Start building networks asynchronously for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 611.297680] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dae5937f-11fc-4e77-b96b-1179f0d567cf tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.495s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 611.299015] env[63345]: DEBUG nova.objects.instance [None req-dae5937f-11fc-4e77-b96b-1179f0d567cf tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Lazy-loading 'resources' on Instance uuid 1f595aef-799f-4ca4-be91-e95ef056926c {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 611.351546] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f951fde0-e7d4-4228-ab48-05cbc0ea0cd4 tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Releasing lock "refresh_cache-2889c4d1-ac1b-404d-a4f7-2b908557348d" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 611.351979] env[63345]: DEBUG nova.compute.manager [None req-f951fde0-e7d4-4228-ab48-05cbc0ea0cd4 tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] Start destroying the instance on the hypervisor. 
{{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 611.352197] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-f951fde0-e7d4-4228-ab48-05cbc0ea0cd4 tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 611.352502] env[63345]: DEBUG oslo_concurrency.lockutils [req-d1e33f55-53e7-408c-a4b7-ffc963855763 req-6cfa0426-71c4-4713-9738-9f2c891880be service nova] Acquired lock "refresh_cache-2889c4d1-ac1b-404d-a4f7-2b908557348d" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 611.352672] env[63345]: DEBUG nova.network.neutron [req-d1e33f55-53e7-408c-a4b7-ffc963855763 req-6cfa0426-71c4-4713-9738-9f2c891880be service nova] [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] Refreshing network info cache for port 908a9a32-7233-42bf-a87f-5fe81165e6ac {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 611.353664] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-27f2be48-4eec-4a46-b901-548900d94cff {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.363408] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eefe4c84-2494-467f-9f86-7c1aca9a2bf2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.394570] env[63345]: WARNING nova.virt.vmwareapi.vmops [None req-f951fde0-e7d4-4228-ab48-05cbc0ea0cd4 tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 2889c4d1-ac1b-404d-a4f7-2b908557348d could not be found. [ 611.394570] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-f951fde0-e7d4-4228-ab48-05cbc0ea0cd4 tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 611.394570] env[63345]: INFO nova.compute.manager [None req-f951fde0-e7d4-4228-ab48-05cbc0ea0cd4 tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] Took 0.04 seconds to destroy the instance on the hypervisor. [ 611.394570] env[63345]: DEBUG oslo.service.loopingcall [None req-f951fde0-e7d4-4228-ab48-05cbc0ea0cd4 tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 611.394570] env[63345]: DEBUG nova.compute.manager [-] [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 611.394570] env[63345]: DEBUG nova.network.neutron [-] [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 611.431846] env[63345]: DEBUG nova.network.neutron [-] [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 611.803798] env[63345]: DEBUG nova.compute.utils [None req-28a9f8ee-561e-42c1-a81b-2f1cf60def7e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 611.807187] env[63345]: DEBUG nova.compute.manager [None req-28a9f8ee-561e-42c1-a81b-2f1cf60def7e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] Allocating IP information in the background. {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 611.807782] env[63345]: DEBUG nova.network.neutron [None req-28a9f8ee-561e-42c1-a81b-2f1cf60def7e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 611.868626] env[63345]: DEBUG nova.policy [None req-28a9f8ee-561e-42c1-a81b-2f1cf60def7e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fce058d27d8e4da19af436b282b37f32', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '63d7b3facae6416989f763e610cf98f7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 611.900287] env[63345]: DEBUG nova.network.neutron [req-d1e33f55-53e7-408c-a4b7-ffc963855763 req-6cfa0426-71c4-4713-9738-9f2c891880be service nova] [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 611.912191] env[63345]: INFO nova.scheduler.client.report [None req-8a6c5bc0-caa6-4dbe-a700-0852231dd95b tempest-ServerExternalEventsTest-203609284 tempest-ServerExternalEventsTest-203609284-project-member] Deleted allocations for instance 77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54 [ 612.019330] env[63345]: DEBUG nova.network.neutron [req-d1e33f55-53e7-408c-a4b7-ffc963855763 req-6cfa0426-71c4-4713-9738-9f2c891880be service nova] [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 612.172168] env[63345]: DEBUG nova.network.neutron [None req-28a9f8ee-561e-42c1-a81b-2f1cf60def7e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] Successfully created port: 5e07a532-1360-4ce0-a13a-d9d0b96bbf5d {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 612.294141] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56966e85-807f-4f8a-958f-073f45a15ee4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.302211] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb12a108-40bc-4953-888a-4b5cf7c32ead {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.308135] env[63345]: DEBUG nova.compute.manager [None req-28a9f8ee-561e-42c1-a81b-2f1cf60def7e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] Start building block device mappings for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 612.336190] env[63345]: DEBUG nova.network.neutron [-] [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 612.341915] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f79f4920-97ed-46ab-aab9-e0afc3f96d9b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.350095] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e8d213e-10eb-4982-80e3-419a50013a4a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.365521] env[63345]: DEBUG nova.compute.provider_tree [None req-dae5937f-11fc-4e77-b96b-1179f0d567cf tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 612.421045] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8a6c5bc0-caa6-4dbe-a700-0852231dd95b tempest-ServerExternalEventsTest-203609284 tempest-ServerExternalEventsTest-203609284-project-member] Lock "77c62cdc-3c3d-4dd2-8cbc-2c89c62a3a54" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 77.811s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 612.523743] env[63345]: DEBUG oslo_concurrency.lockutils [req-d1e33f55-53e7-408c-a4b7-ffc963855763 req-6cfa0426-71c4-4713-9738-9f2c891880be service nova] Releasing lock "refresh_cache-2889c4d1-ac1b-404d-a4f7-2b908557348d" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 612.524083] env[63345]: DEBUG nova.compute.manager [req-d1e33f55-53e7-408c-a4b7-ffc963855763 req-6cfa0426-71c4-4713-9738-9f2c891880be service nova] [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] Received event network-vif-deleted-908a9a32-7233-42bf-a87f-5fe81165e6ac {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 612.846036] env[63345]: INFO nova.compute.manager [-] [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] Took 1.45 seconds to deallocate network for instance. 
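The interleaved lock bookkeeping in these records ("Acquiring lock ...", "acquired ... waited 21.495s", "released ... held 2.519s") comes from oslo.concurrency: the context-manager form of lockutils.lock() logs acquire/release for the refresh_cache-<uuid> locks, and the synchronized() decorator's inner wrapper logs the waited/held timings around "compute_resources". A minimal sketch of both forms follows; the lock names are taken from the log, while the function bodies are hypothetical placeholders.

    from oslo_concurrency import lockutils


    @lockutils.synchronized('compute_resources')
    def claim_resources(instance_uuid):
        # Serialized section: the decorator's wrapper is what emits the
        # 'acquired ... waited Ns' / 'released ... held Ns' DEBUG lines
        # seen in the log while this body runs.
        print('claiming resources for %s' % instance_uuid)


    def refresh_network_cache(instance_uuid):
        # Context-manager form, matching the refresh_cache-<uuid> locks in
        # the log; the body stands in for rebuilding instance_info_cache.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            print('refreshing network info cache for %s' % instance_uuid)


    claim_resources('35a5bd72-403b-467b-ad52-1a1bf4958dbb')
    refresh_network_cache('35a5bd72-403b-467b-ad52-1a1bf4958dbb')

These are in-process (green-thread) locks by default, which matches the single pid=63345 worker in this log; external file locks would be a separate lockutils option.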
[ 612.848499] env[63345]: DEBUG nova.compute.claims [None req-f951fde0-e7d4-4228-ab48-05cbc0ea0cd4 tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] Aborting claim: {{(pid=63345) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 612.848882] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f951fde0-e7d4-4228-ab48-05cbc0ea0cd4 tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 612.852172] env[63345]: DEBUG nova.compute.manager [req-7322201a-681b-40b5-950f-70ad07b3098e req-ef423eec-5f15-44a5-b1cd-95208a0658ad service nova] [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] Received event network-changed-5e07a532-1360-4ce0-a13a-d9d0b96bbf5d {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 612.852294] env[63345]: DEBUG nova.compute.manager [req-7322201a-681b-40b5-950f-70ad07b3098e req-ef423eec-5f15-44a5-b1cd-95208a0658ad service nova] [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] Refreshing instance network info cache due to event network-changed-5e07a532-1360-4ce0-a13a-d9d0b96bbf5d. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 612.852498] env[63345]: DEBUG oslo_concurrency.lockutils [req-7322201a-681b-40b5-950f-70ad07b3098e req-ef423eec-5f15-44a5-b1cd-95208a0658ad service nova] Acquiring lock "refresh_cache-35a5bd72-403b-467b-ad52-1a1bf4958dbb" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 612.852634] env[63345]: DEBUG oslo_concurrency.lockutils [req-7322201a-681b-40b5-950f-70ad07b3098e req-ef423eec-5f15-44a5-b1cd-95208a0658ad service nova] Acquired lock "refresh_cache-35a5bd72-403b-467b-ad52-1a1bf4958dbb" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 612.853600] env[63345]: DEBUG nova.network.neutron [req-7322201a-681b-40b5-950f-70ad07b3098e req-ef423eec-5f15-44a5-b1cd-95208a0658ad service nova] [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] Refreshing network info cache for port 5e07a532-1360-4ce0-a13a-d9d0b96bbf5d {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 612.868485] env[63345]: DEBUG nova.scheduler.client.report [None req-dae5937f-11fc-4e77-b96b-1179f0d567cf tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 612.924764] env[63345]: DEBUG nova.compute.manager [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: b4a7d6dd-98dc-49d8-b344-1878cd5a3f51] Starting instance... 
{{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 613.041501] env[63345]: ERROR nova.compute.manager [None req-28a9f8ee-561e-42c1-a81b-2f1cf60def7e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 5e07a532-1360-4ce0-a13a-d9d0b96bbf5d, please check neutron logs for more information. [ 613.041501] env[63345]: ERROR nova.compute.manager Traceback (most recent call last): [ 613.041501] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 613.041501] env[63345]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 613.041501] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 613.041501] env[63345]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 613.041501] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 613.041501] env[63345]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 613.041501] env[63345]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 613.041501] env[63345]: ERROR nova.compute.manager self.force_reraise() [ 613.041501] env[63345]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 613.041501] env[63345]: ERROR nova.compute.manager raise self.value [ 613.041501] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 613.041501] env[63345]: ERROR nova.compute.manager updated_port = self._update_port( [ 613.041501] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 613.041501] env[63345]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 613.042231] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 613.042231] env[63345]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 613.042231] env[63345]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 5e07a532-1360-4ce0-a13a-d9d0b96bbf5d, please check neutron logs for more information. 
[ 613.042231] env[63345]: ERROR nova.compute.manager [ 613.042231] env[63345]: Traceback (most recent call last): [ 613.042231] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 613.042231] env[63345]: listener.cb(fileno) [ 613.042231] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 613.042231] env[63345]: result = function(*args, **kwargs) [ 613.042231] env[63345]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 613.042231] env[63345]: return func(*args, **kwargs) [ 613.042231] env[63345]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 613.042231] env[63345]: raise e [ 613.042231] env[63345]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 613.042231] env[63345]: nwinfo = self.network_api.allocate_for_instance( [ 613.042231] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 613.042231] env[63345]: created_port_ids = self._update_ports_for_instance( [ 613.042231] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 613.042231] env[63345]: with excutils.save_and_reraise_exception(): [ 613.042231] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 613.042231] env[63345]: self.force_reraise() [ 613.042231] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 613.042231] env[63345]: raise self.value [ 613.042231] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 613.042231] env[63345]: updated_port = self._update_port( [ 613.042231] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 613.042231] env[63345]: _ensure_no_port_binding_failure(port) [ 613.042231] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 613.042231] env[63345]: raise exception.PortBindingFailed(port_id=port['id']) [ 613.043471] env[63345]: nova.exception.PortBindingFailed: Binding failed for port 5e07a532-1360-4ce0-a13a-d9d0b96bbf5d, please check neutron logs for more information. [ 613.043471] env[63345]: Removing descriptor: 16 [ 613.350404] env[63345]: DEBUG nova.compute.manager [None req-28a9f8ee-561e-42c1-a81b-2f1cf60def7e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] Start spawning the instance on the hypervisor. 
{{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 613.376455] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dae5937f-11fc-4e77-b96b-1179f0d567cf tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.079s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 613.378671] env[63345]: DEBUG oslo_concurrency.lockutils [None req-40c46a23-51d1-4974-99d7-63d301a98173 tempest-ServerActionsTestJSON-1881851479 tempest-ServerActionsTestJSON-1881851479-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.186s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 613.380744] env[63345]: INFO nova.compute.claims [None req-40c46a23-51d1-4974-99d7-63d301a98173 tempest-ServerActionsTestJSON-1881851479 tempest-ServerActionsTestJSON-1881851479-project-member] [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 613.386127] env[63345]: DEBUG nova.virt.hardware [None req-28a9f8ee-561e-42c1-a81b-2f1cf60def7e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 613.386358] env[63345]: DEBUG nova.virt.hardware [None req-28a9f8ee-561e-42c1-a81b-2f1cf60def7e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 613.386533] env[63345]: DEBUG nova.virt.hardware [None req-28a9f8ee-561e-42c1-a81b-2f1cf60def7e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 613.386685] env[63345]: DEBUG nova.virt.hardware [None req-28a9f8ee-561e-42c1-a81b-2f1cf60def7e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 613.386838] env[63345]: DEBUG nova.virt.hardware [None req-28a9f8ee-561e-42c1-a81b-2f1cf60def7e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 613.386986] env[63345]: DEBUG nova.virt.hardware [None 
req-28a9f8ee-561e-42c1-a81b-2f1cf60def7e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 613.387435] env[63345]: DEBUG nova.virt.hardware [None req-28a9f8ee-561e-42c1-a81b-2f1cf60def7e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 613.387822] env[63345]: DEBUG nova.virt.hardware [None req-28a9f8ee-561e-42c1-a81b-2f1cf60def7e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 613.387822] env[63345]: DEBUG nova.virt.hardware [None req-28a9f8ee-561e-42c1-a81b-2f1cf60def7e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 613.387954] env[63345]: DEBUG nova.virt.hardware [None req-28a9f8ee-561e-42c1-a81b-2f1cf60def7e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 613.388147] env[63345]: DEBUG nova.virt.hardware [None req-28a9f8ee-561e-42c1-a81b-2f1cf60def7e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 613.389024] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ca6c374-ad30-4903-acdb-923cd22e5baf {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.397735] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fd827ba-ad58-4c48-9f6a-86c688466b5c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.413126] env[63345]: ERROR nova.compute.manager [None req-28a9f8ee-561e-42c1-a81b-2f1cf60def7e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 5e07a532-1360-4ce0-a13a-d9d0b96bbf5d, please check neutron logs for more information. 
[ 613.413126] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] Traceback (most recent call last): [ 613.413126] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 613.413126] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] yield resources [ 613.413126] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 613.413126] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] self.driver.spawn(context, instance, image_meta, [ 613.413126] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 613.413126] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] self._vmops.spawn(context, instance, image_meta, injected_files, [ 613.413126] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 613.413126] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] vm_ref = self.build_virtual_machine(instance, [ 613.413126] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 613.413577] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] vif_infos = vmwarevif.get_vif_info(self._session, [ 613.413577] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 613.413577] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] for vif in network_info: [ 613.413577] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 613.413577] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] return self._sync_wrapper(fn, *args, **kwargs) [ 613.413577] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 613.413577] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] self.wait() [ 613.413577] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 613.413577] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] self[:] = self._gt.wait() [ 613.413577] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 613.413577] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] return self._exit_event.wait() [ 613.413577] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 613.413577] env[63345]: ERROR 
nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] current.throw(*self._exc) [ 613.414045] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 613.414045] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] result = function(*args, **kwargs) [ 613.414045] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 613.414045] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] return func(*args, **kwargs) [ 613.414045] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 613.414045] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] raise e [ 613.414045] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 613.414045] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] nwinfo = self.network_api.allocate_for_instance( [ 613.414045] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 613.414045] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] created_port_ids = self._update_ports_for_instance( [ 613.414045] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 613.414045] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] with excutils.save_and_reraise_exception(): [ 613.414045] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 613.414629] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] self.force_reraise() [ 613.414629] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 613.414629] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] raise self.value [ 613.414629] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 613.414629] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] updated_port = self._update_port( [ 613.414629] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 613.414629] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] _ensure_no_port_binding_failure(port) [ 613.414629] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
613.414629] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] raise exception.PortBindingFailed(port_id=port['id']) [ 613.414629] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] nova.exception.PortBindingFailed: Binding failed for port 5e07a532-1360-4ce0-a13a-d9d0b96bbf5d, please check neutron logs for more information. [ 613.414629] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] [ 613.414629] env[63345]: INFO nova.compute.manager [None req-28a9f8ee-561e-42c1-a81b-2f1cf60def7e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] Terminating instance [ 613.416223] env[63345]: INFO nova.scheduler.client.report [None req-dae5937f-11fc-4e77-b96b-1179f0d567cf tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Deleted allocations for instance 1f595aef-799f-4ca4-be91-e95ef056926c [ 613.448858] env[63345]: DEBUG oslo_concurrency.lockutils [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 613.522063] env[63345]: DEBUG nova.network.neutron [req-7322201a-681b-40b5-950f-70ad07b3098e req-ef423eec-5f15-44a5-b1cd-95208a0658ad service nova] [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 613.593759] env[63345]: DEBUG nova.network.neutron [req-7322201a-681b-40b5-950f-70ad07b3098e req-ef423eec-5f15-44a5-b1cd-95208a0658ad service nova] [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 613.922110] env[63345]: DEBUG oslo_concurrency.lockutils [None req-28a9f8ee-561e-42c1-a81b-2f1cf60def7e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquiring lock "refresh_cache-35a5bd72-403b-467b-ad52-1a1bf4958dbb" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 613.924872] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dae5937f-11fc-4e77-b96b-1179f0d567cf tempest-ServersAaction247Test-274552655 tempest-ServersAaction247Test-274552655-project-member] Lock "1f595aef-799f-4ca4-be91-e95ef056926c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.366s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 614.096986] env[63345]: DEBUG oslo_concurrency.lockutils [req-7322201a-681b-40b5-950f-70ad07b3098e req-ef423eec-5f15-44a5-b1cd-95208a0658ad service nova] Releasing lock "refresh_cache-35a5bd72-403b-467b-ad52-1a1bf4958dbb" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 614.097487] env[63345]: DEBUG oslo_concurrency.lockutils [None req-28a9f8ee-561e-42c1-a81b-2f1cf60def7e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquired lock "refresh_cache-35a5bd72-403b-467b-ad52-1a1bf4958dbb" {{(pid=63345) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 614.097668] env[63345]: DEBUG nova.network.neutron [None req-28a9f8ee-561e-42c1-a81b-2f1cf60def7e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 614.626026] env[63345]: DEBUG nova.network.neutron [None req-28a9f8ee-561e-42c1-a81b-2f1cf60def7e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 614.781205] env[63345]: DEBUG nova.network.neutron [None req-28a9f8ee-561e-42c1-a81b-2f1cf60def7e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 614.878891] env[63345]: DEBUG nova.compute.manager [req-830c69fd-1b41-49b7-bd52-a11acb0e48da req-84073b69-fa85-4bcd-a2c7-53d6c084c93d service nova] [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] Received event network-vif-deleted-5e07a532-1360-4ce0-a13a-d9d0b96bbf5d {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 614.899088] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab629f76-69b5-4bb5-861d-f9accab9b27a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.909302] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db161450-9586-4ccb-89a0-08c17db4e99d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.942764] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a914ea15-6e21-4eef-8522-00e812570ea5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.951155] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72d44205-d0d0-46c0-b589-9c0a84d68636 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.965164] env[63345]: DEBUG nova.compute.provider_tree [None req-40c46a23-51d1-4974-99d7-63d301a98173 tempest-ServerActionsTestJSON-1881851479 tempest-ServerActionsTestJSON-1881851479-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 615.287497] env[63345]: DEBUG oslo_concurrency.lockutils [None req-28a9f8ee-561e-42c1-a81b-2f1cf60def7e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Releasing lock "refresh_cache-35a5bd72-403b-467b-ad52-1a1bf4958dbb" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 615.287929] env[63345]: DEBUG nova.compute.manager [None req-28a9f8ee-561e-42c1-a81b-2f1cf60def7e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] 
[instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] Start destroying the instance on the hypervisor. {{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 615.288146] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-28a9f8ee-561e-42c1-a81b-2f1cf60def7e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 615.288473] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2641349a-80cd-4dd9-9cd7-3c657c67a493 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.298827] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70b6d365-2b76-4317-ac82-c07ee86458f9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.325390] env[63345]: WARNING nova.virt.vmwareapi.vmops [None req-28a9f8ee-561e-42c1-a81b-2f1cf60def7e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 35a5bd72-403b-467b-ad52-1a1bf4958dbb could not be found. [ 615.325646] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-28a9f8ee-561e-42c1-a81b-2f1cf60def7e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 615.325826] env[63345]: INFO nova.compute.manager [None req-28a9f8ee-561e-42c1-a81b-2f1cf60def7e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] Took 0.04 seconds to destroy the instance on the hypervisor. [ 615.326086] env[63345]: DEBUG oslo.service.loopingcall [None req-28a9f8ee-561e-42c1-a81b-2f1cf60def7e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 615.326512] env[63345]: DEBUG nova.compute.manager [-] [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 615.326512] env[63345]: DEBUG nova.network.neutron [-] [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 615.349845] env[63345]: DEBUG nova.network.neutron [-] [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 615.468711] env[63345]: DEBUG nova.scheduler.client.report [None req-40c46a23-51d1-4974-99d7-63d301a98173 tempest-ServerActionsTestJSON-1881851479 tempest-ServerActionsTestJSON-1881851479-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 615.852659] env[63345]: DEBUG nova.network.neutron [-] [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 615.975143] env[63345]: DEBUG oslo_concurrency.lockutils [None req-40c46a23-51d1-4974-99d7-63d301a98173 tempest-ServerActionsTestJSON-1881851479 tempest-ServerActionsTestJSON-1881851479-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.596s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 615.975705] env[63345]: DEBUG nova.compute.manager [None req-40c46a23-51d1-4974-99d7-63d301a98173 tempest-ServerActionsTestJSON-1881851479 tempest-ServerActionsTestJSON-1881851479-project-member] [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] Start building networks asynchronously for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 615.980325] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a2c11686-f3e2-427b-a111-6c510c529d42 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.756s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 615.981966] env[63345]: INFO nova.compute.claims [None req-a2c11686-f3e2-427b-a111-6c510c529d42 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 616.356203] env[63345]: INFO nova.compute.manager [-] [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] Took 1.03 seconds to deallocate network for instance. 
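The inventory record in the report-client entry above carries the arithmetic that decides how much this provider can schedule. As a quick sanity check, the sketch below applies the usual Placement capacity formula, capacity = (total - reserved) * allocation_ratio, to the exact dict logged for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57; the dict literal is copied from the log line and only the formula is assumed.

    # Effective capacity implied by the provider inventory logged above, using
    # the usual Placement formula: capacity = (total - reserved) * allocation_ratio.
    # The inventory values are copied from the report-client log entry.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 187},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        # max_unit caps what a single allocation (one instance) may request.
        print(f"{rc}: schedulable={capacity:.0f}, per-instance cap={inv['max_unit']}")

    # Expected output:
    # VCPU: schedulable=192, per-instance cap=16
    # MEMORY_MB: schedulable=196078, per-instance cap=65530
    # DISK_GB: schedulable=400, per-instance cap=187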
[ 616.359648] env[63345]: DEBUG nova.compute.claims [None req-28a9f8ee-561e-42c1-a81b-2f1cf60def7e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] Aborting claim: {{(pid=63345) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 616.359878] env[63345]: DEBUG oslo_concurrency.lockutils [None req-28a9f8ee-561e-42c1-a81b-2f1cf60def7e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 616.482382] env[63345]: DEBUG nova.compute.utils [None req-40c46a23-51d1-4974-99d7-63d301a98173 tempest-ServerActionsTestJSON-1881851479 tempest-ServerActionsTestJSON-1881851479-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 616.483791] env[63345]: DEBUG nova.compute.manager [None req-40c46a23-51d1-4974-99d7-63d301a98173 tempest-ServerActionsTestJSON-1881851479 tempest-ServerActionsTestJSON-1881851479-project-member] [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] Allocating IP information in the background. {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 616.484612] env[63345]: DEBUG nova.network.neutron [None req-40c46a23-51d1-4974-99d7-63d301a98173 tempest-ServerActionsTestJSON-1881851479 tempest-ServerActionsTestJSON-1881851479-project-member] [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 616.554401] env[63345]: DEBUG nova.policy [None req-40c46a23-51d1-4974-99d7-63d301a98173 tempest-ServerActionsTestJSON-1881851479 tempest-ServerActionsTestJSON-1881851479-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c8561e8a06fd43ae9329361e310721bd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e1563d396f9d4fd792b40d764d1cb237', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 616.896184] env[63345]: DEBUG nova.network.neutron [None req-40c46a23-51d1-4974-99d7-63d301a98173 tempest-ServerActionsTestJSON-1881851479 tempest-ServerActionsTestJSON-1881851479-project-member] [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] Successfully created port: 5013d56d-b041-4790-9c2f-7f0c6d71ec30 {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 616.991279] env[63345]: DEBUG nova.compute.manager [None req-40c46a23-51d1-4974-99d7-63d301a98173 tempest-ServerActionsTestJSON-1881851479 tempest-ServerActionsTestJSON-1881851479-project-member] [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] Start building block device mappings for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 617.544646] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba712e24-8ba1-4ee4-9219-2b2779be49b8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.554223] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2350b5f6-33cf-4ddb-800d-07b468096318 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.587681] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f42b025c-5812-4de6-9cd5-485f0807beca {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.596863] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2adef377-a4e8-4390-a403-a71dfad76b79 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.616654] env[63345]: DEBUG nova.compute.provider_tree [None req-a2c11686-f3e2-427b-a111-6c510c529d42 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 617.952180] env[63345]: ERROR nova.compute.manager [None req-40c46a23-51d1-4974-99d7-63d301a98173 tempest-ServerActionsTestJSON-1881851479 tempest-ServerActionsTestJSON-1881851479-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 5013d56d-b041-4790-9c2f-7f0c6d71ec30, please check neutron logs for more information. 
[ 617.952180] env[63345]: ERROR nova.compute.manager Traceback (most recent call last): [ 617.952180] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 617.952180] env[63345]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 617.952180] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 617.952180] env[63345]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 617.952180] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 617.952180] env[63345]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 617.952180] env[63345]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 617.952180] env[63345]: ERROR nova.compute.manager self.force_reraise() [ 617.952180] env[63345]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 617.952180] env[63345]: ERROR nova.compute.manager raise self.value [ 617.952180] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 617.952180] env[63345]: ERROR nova.compute.manager updated_port = self._update_port( [ 617.952180] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 617.952180] env[63345]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 617.952813] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 617.952813] env[63345]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 617.952813] env[63345]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 5013d56d-b041-4790-9c2f-7f0c6d71ec30, please check neutron logs for more information. 
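The traceback above bottoms out in nova/network/neutron.py:_ensure_no_port_binding_failure, which turns a failed Neutron port binding into the PortBindingFailed that aborts the build. The following is a minimal stand-in for that kind of check, not Nova's actual code, assuming the Neutron port dict exposes a 'binding:vif_type' field that is set to 'binding_failed' when no mechanism driver could bind the port:

    # Simplified stand-in for the check the traceback above ends in: after
    # updating a port, inspect the binding result Neutron reports and fail the
    # build if binding did not succeed.
    VIF_TYPE_BINDING_FAILED = 'binding_failed'   # value Neutron reports for a failed binding

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                f"Binding failed for port {port_id}, "
                "please check neutron logs for more information.")

    def ensure_no_port_binding_failure(port):
        # 'port' is the dict returned by the Neutron API for the updated port.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

    # The port id below is the one from the log entry above; a port whose
    # binding could not be completed surfaces exactly this message.
    try:
        ensure_no_port_binding_failure({
            'id': '5013d56d-b041-4790-9c2f-7f0c6d71ec30',
            'binding:vif_type': 'binding_failed',
        })
    except PortBindingFailed as exc:
        print(exc)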
[ 617.952813] env[63345]: ERROR nova.compute.manager [ 617.952813] env[63345]: Traceback (most recent call last): [ 617.952813] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 617.952813] env[63345]: listener.cb(fileno) [ 617.952813] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 617.952813] env[63345]: result = function(*args, **kwargs) [ 617.952813] env[63345]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 617.952813] env[63345]: return func(*args, **kwargs) [ 617.952813] env[63345]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 617.952813] env[63345]: raise e [ 617.952813] env[63345]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 617.952813] env[63345]: nwinfo = self.network_api.allocate_for_instance( [ 617.952813] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 617.952813] env[63345]: created_port_ids = self._update_ports_for_instance( [ 617.952813] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 617.952813] env[63345]: with excutils.save_and_reraise_exception(): [ 617.952813] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 617.952813] env[63345]: self.force_reraise() [ 617.952813] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 617.952813] env[63345]: raise self.value [ 617.952813] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 617.952813] env[63345]: updated_port = self._update_port( [ 617.952813] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 617.952813] env[63345]: _ensure_no_port_binding_failure(port) [ 617.952813] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 617.952813] env[63345]: raise exception.PortBindingFailed(port_id=port['id']) [ 617.953562] env[63345]: nova.exception.PortBindingFailed: Binding failed for port 5013d56d-b041-4790-9c2f-7f0c6d71ec30, please check neutron logs for more information. [ 617.953562] env[63345]: Removing descriptor: 16 [ 617.998778] env[63345]: DEBUG nova.compute.manager [None req-40c46a23-51d1-4974-99d7-63d301a98173 tempest-ServerActionsTestJSON-1881851479 tempest-ServerActionsTestJSON-1881851479-project-member] [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] Start spawning the instance on the hypervisor. 
{{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 618.033982] env[63345]: DEBUG nova.virt.hardware [None req-40c46a23-51d1-4974-99d7-63d301a98173 tempest-ServerActionsTestJSON-1881851479 tempest-ServerActionsTestJSON-1881851479-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 618.034643] env[63345]: DEBUG nova.virt.hardware [None req-40c46a23-51d1-4974-99d7-63d301a98173 tempest-ServerActionsTestJSON-1881851479 tempest-ServerActionsTestJSON-1881851479-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 618.034938] env[63345]: DEBUG nova.virt.hardware [None req-40c46a23-51d1-4974-99d7-63d301a98173 tempest-ServerActionsTestJSON-1881851479 tempest-ServerActionsTestJSON-1881851479-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 618.035279] env[63345]: DEBUG nova.virt.hardware [None req-40c46a23-51d1-4974-99d7-63d301a98173 tempest-ServerActionsTestJSON-1881851479 tempest-ServerActionsTestJSON-1881851479-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 618.035560] env[63345]: DEBUG nova.virt.hardware [None req-40c46a23-51d1-4974-99d7-63d301a98173 tempest-ServerActionsTestJSON-1881851479 tempest-ServerActionsTestJSON-1881851479-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 618.036139] env[63345]: DEBUG nova.virt.hardware [None req-40c46a23-51d1-4974-99d7-63d301a98173 tempest-ServerActionsTestJSON-1881851479 tempest-ServerActionsTestJSON-1881851479-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 618.036514] env[63345]: DEBUG nova.virt.hardware [None req-40c46a23-51d1-4974-99d7-63d301a98173 tempest-ServerActionsTestJSON-1881851479 tempest-ServerActionsTestJSON-1881851479-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 618.036817] env[63345]: DEBUG nova.virt.hardware [None req-40c46a23-51d1-4974-99d7-63d301a98173 tempest-ServerActionsTestJSON-1881851479 tempest-ServerActionsTestJSON-1881851479-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 618.037244] env[63345]: DEBUG nova.virt.hardware [None 
req-40c46a23-51d1-4974-99d7-63d301a98173 tempest-ServerActionsTestJSON-1881851479 tempest-ServerActionsTestJSON-1881851479-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 618.037605] env[63345]: DEBUG nova.virt.hardware [None req-40c46a23-51d1-4974-99d7-63d301a98173 tempest-ServerActionsTestJSON-1881851479 tempest-ServerActionsTestJSON-1881851479-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 618.038079] env[63345]: DEBUG nova.virt.hardware [None req-40c46a23-51d1-4974-99d7-63d301a98173 tempest-ServerActionsTestJSON-1881851479 tempest-ServerActionsTestJSON-1881851479-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 618.039680] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e93341e-4b47-4535-a224-cfee36cb890d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.053841] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b265c33a-0234-4c45-9192-95e2da71ce0c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.081321] env[63345]: ERROR nova.compute.manager [None req-40c46a23-51d1-4974-99d7-63d301a98173 tempest-ServerActionsTestJSON-1881851479 tempest-ServerActionsTestJSON-1881851479-project-member] [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 5013d56d-b041-4790-9c2f-7f0c6d71ec30, please check neutron logs for more information. 
[ 618.081321] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] Traceback (most recent call last): [ 618.081321] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 618.081321] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] yield resources [ 618.081321] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 618.081321] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] self.driver.spawn(context, instance, image_meta, [ 618.081321] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 618.081321] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] self._vmops.spawn(context, instance, image_meta, injected_files, [ 618.081321] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 618.081321] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] vm_ref = self.build_virtual_machine(instance, [ 618.081321] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 618.081671] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] vif_infos = vmwarevif.get_vif_info(self._session, [ 618.081671] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 618.081671] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] for vif in network_info: [ 618.081671] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 618.081671] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] return self._sync_wrapper(fn, *args, **kwargs) [ 618.081671] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 618.081671] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] self.wait() [ 618.081671] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 618.081671] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] self[:] = self._gt.wait() [ 618.081671] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 618.081671] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] return self._exit_event.wait() [ 618.081671] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 618.081671] env[63345]: ERROR 
nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] current.throw(*self._exc) [ 618.082069] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 618.082069] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] result = function(*args, **kwargs) [ 618.082069] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 618.082069] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] return func(*args, **kwargs) [ 618.082069] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 618.082069] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] raise e [ 618.082069] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 618.082069] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] nwinfo = self.network_api.allocate_for_instance( [ 618.082069] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 618.082069] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] created_port_ids = self._update_ports_for_instance( [ 618.082069] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 618.082069] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] with excutils.save_and_reraise_exception(): [ 618.082069] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 618.082445] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] self.force_reraise() [ 618.082445] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 618.082445] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] raise self.value [ 618.082445] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 618.082445] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] updated_port = self._update_port( [ 618.082445] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 618.082445] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] _ensure_no_port_binding_failure(port) [ 618.082445] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
618.082445] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] raise exception.PortBindingFailed(port_id=port['id']) [ 618.082445] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] nova.exception.PortBindingFailed: Binding failed for port 5013d56d-b041-4790-9c2f-7f0c6d71ec30, please check neutron logs for more information. [ 618.082445] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] [ 618.082445] env[63345]: INFO nova.compute.manager [None req-40c46a23-51d1-4974-99d7-63d301a98173 tempest-ServerActionsTestJSON-1881851479 tempest-ServerActionsTestJSON-1881851479-project-member] [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] Terminating instance [ 618.121839] env[63345]: DEBUG nova.scheduler.client.report [None req-a2c11686-f3e2-427b-a111-6c510c529d42 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 618.133555] env[63345]: DEBUG nova.compute.manager [req-b79b5137-0775-45a3-b2eb-c026ed427be8 req-f962ef5b-ef5c-4d9b-b08e-f0fa06aa2d73 service nova] [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] Received event network-changed-5013d56d-b041-4790-9c2f-7f0c6d71ec30 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 618.134267] env[63345]: DEBUG nova.compute.manager [req-b79b5137-0775-45a3-b2eb-c026ed427be8 req-f962ef5b-ef5c-4d9b-b08e-f0fa06aa2d73 service nova] [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] Refreshing instance network info cache due to event network-changed-5013d56d-b041-4790-9c2f-7f0c6d71ec30. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 618.134857] env[63345]: DEBUG oslo_concurrency.lockutils [req-b79b5137-0775-45a3-b2eb-c026ed427be8 req-f962ef5b-ef5c-4d9b-b08e-f0fa06aa2d73 service nova] Acquiring lock "refresh_cache-fb2cdca8-441c-4edb-be11-6b89c19b3cad" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 618.135105] env[63345]: DEBUG oslo_concurrency.lockutils [req-b79b5137-0775-45a3-b2eb-c026ed427be8 req-f962ef5b-ef5c-4d9b-b08e-f0fa06aa2d73 service nova] Acquired lock "refresh_cache-fb2cdca8-441c-4edb-be11-6b89c19b3cad" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 618.135464] env[63345]: DEBUG nova.network.neutron [req-b79b5137-0775-45a3-b2eb-c026ed427be8 req-f962ef5b-ef5c-4d9b-b08e-f0fa06aa2d73 service nova] [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] Refreshing network info cache for port 5013d56d-b041-4790-9c2f-7f0c6d71ec30 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 618.588255] env[63345]: DEBUG oslo_concurrency.lockutils [None req-40c46a23-51d1-4974-99d7-63d301a98173 tempest-ServerActionsTestJSON-1881851479 tempest-ServerActionsTestJSON-1881851479-project-member] Acquiring lock "refresh_cache-fb2cdca8-441c-4edb-be11-6b89c19b3cad" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 618.640024] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a2c11686-f3e2-427b-a111-6c510c529d42 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.660s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 618.640513] env[63345]: DEBUG nova.compute.manager [None req-a2c11686-f3e2-427b-a111-6c510c529d42 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] Start building networks asynchronously for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 618.648487] env[63345]: DEBUG oslo_concurrency.lockutils [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.798s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 618.650897] env[63345]: INFO nova.compute.claims [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: ee31689b-bf0b-4737-86c7-5451c763e603] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 618.688035] env[63345]: DEBUG nova.network.neutron [req-b79b5137-0775-45a3-b2eb-c026ed427be8 req-f962ef5b-ef5c-4d9b-b08e-f0fa06aa2d73 service nova] [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 618.796830] env[63345]: DEBUG nova.network.neutron [req-b79b5137-0775-45a3-b2eb-c026ed427be8 req-f962ef5b-ef5c-4d9b-b08e-f0fa06aa2d73 service nova] [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 619.147830] env[63345]: DEBUG nova.compute.utils [None req-a2c11686-f3e2-427b-a111-6c510c529d42 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 619.149382] env[63345]: DEBUG nova.compute.manager [None req-a2c11686-f3e2-427b-a111-6c510c529d42 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] Allocating IP information in the background. {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 619.149602] env[63345]: DEBUG nova.network.neutron [None req-a2c11686-f3e2-427b-a111-6c510c529d42 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 619.219401] env[63345]: DEBUG nova.policy [None req-a2c11686-f3e2-427b-a111-6c510c529d42 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a085cf3a30664abb8c5bd85164e0bfd1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c19245b0a3c4457c9e7674c2c6619a6b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 619.299249] env[63345]: DEBUG oslo_concurrency.lockutils [req-b79b5137-0775-45a3-b2eb-c026ed427be8 req-f962ef5b-ef5c-4d9b-b08e-f0fa06aa2d73 service nova] Releasing lock "refresh_cache-fb2cdca8-441c-4edb-be11-6b89c19b3cad" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 619.299796] env[63345]: DEBUG oslo_concurrency.lockutils [None req-40c46a23-51d1-4974-99d7-63d301a98173 tempest-ServerActionsTestJSON-1881851479 tempest-ServerActionsTestJSON-1881851479-project-member] Acquired lock "refresh_cache-fb2cdca8-441c-4edb-be11-6b89c19b3cad" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 619.300123] env[63345]: DEBUG nova.network.neutron [None req-40c46a23-51d1-4974-99d7-63d301a98173 tempest-ServerActionsTestJSON-1881851479 tempest-ServerActionsTestJSON-1881851479-project-member] [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 619.492013] env[63345]: DEBUG nova.network.neutron [None req-a2c11686-f3e2-427b-a111-6c510c529d42 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] [instance: 
56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] Successfully created port: 3d199751-8956-48ca-b152-f05509099c33 {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 619.655718] env[63345]: DEBUG nova.compute.manager [None req-a2c11686-f3e2-427b-a111-6c510c529d42 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] Start building block device mappings for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 619.824317] env[63345]: DEBUG nova.network.neutron [None req-40c46a23-51d1-4974-99d7-63d301a98173 tempest-ServerActionsTestJSON-1881851479 tempest-ServerActionsTestJSON-1881851479-project-member] [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 619.970981] env[63345]: DEBUG nova.network.neutron [None req-40c46a23-51d1-4974-99d7-63d301a98173 tempest-ServerActionsTestJSON-1881851479 tempest-ServerActionsTestJSON-1881851479-project-member] [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 620.205880] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-885ac806-2976-415a-a401-2be0928e6e4a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.216991] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f98014c0-325a-49d8-8c3a-aef4ff92f79e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.252651] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55c02f52-df7f-42d7-91e7-afce3633f974 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.258617] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62ad138e-abd7-4ff1-8e8a-7fbf377f67d4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.279896] env[63345]: DEBUG nova.compute.provider_tree [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 620.377843] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Acquiring lock "64fcf837-1d9d-41b1-a2a1-3c16362932cf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 620.378308] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Lock "64fcf837-1d9d-41b1-a2a1-3c16362932cf" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 620.474461] env[63345]: DEBUG oslo_concurrency.lockutils [None req-40c46a23-51d1-4974-99d7-63d301a98173 tempest-ServerActionsTestJSON-1881851479 tempest-ServerActionsTestJSON-1881851479-project-member] Releasing lock "refresh_cache-fb2cdca8-441c-4edb-be11-6b89c19b3cad" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 620.475422] env[63345]: DEBUG nova.compute.manager [None req-40c46a23-51d1-4974-99d7-63d301a98173 tempest-ServerActionsTestJSON-1881851479 tempest-ServerActionsTestJSON-1881851479-project-member] [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] Start destroying the instance on the hypervisor. {{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 620.475831] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-40c46a23-51d1-4974-99d7-63d301a98173 tempest-ServerActionsTestJSON-1881851479 tempest-ServerActionsTestJSON-1881851479-project-member] [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 620.476454] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-450d2cf2-6e72-46b9-89ac-ccdefe422843 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.488138] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26673d85-b5a8-4401-8d92-16cc72968ea5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.511696] env[63345]: WARNING nova.virt.vmwareapi.vmops [None req-40c46a23-51d1-4974-99d7-63d301a98173 tempest-ServerActionsTestJSON-1881851479 tempest-ServerActionsTestJSON-1881851479-project-member] [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance fb2cdca8-441c-4edb-be11-6b89c19b3cad could not be found. [ 620.511938] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-40c46a23-51d1-4974-99d7-63d301a98173 tempest-ServerActionsTestJSON-1881851479 tempest-ServerActionsTestJSON-1881851479-project-member] [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 620.512135] env[63345]: INFO nova.compute.manager [None req-40c46a23-51d1-4974-99d7-63d301a98173 tempest-ServerActionsTestJSON-1881851479 tempest-ServerActionsTestJSON-1881851479-project-member] [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] Took 0.04 seconds to destroy the instance on the hypervisor. [ 620.512383] env[63345]: DEBUG oslo.service.loopingcall [None req-40c46a23-51d1-4974-99d7-63d301a98173 tempest-ServerActionsTestJSON-1881851479 tempest-ServerActionsTestJSON-1881851479-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 620.512606] env[63345]: DEBUG nova.compute.manager [-] [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 620.512752] env[63345]: DEBUG nova.network.neutron [-] [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 620.535210] env[63345]: ERROR nova.compute.manager [None req-a2c11686-f3e2-427b-a111-6c510c529d42 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 3d199751-8956-48ca-b152-f05509099c33, please check neutron logs for more information. [ 620.535210] env[63345]: ERROR nova.compute.manager Traceback (most recent call last): [ 620.535210] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 620.535210] env[63345]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 620.535210] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 620.535210] env[63345]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 620.535210] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 620.535210] env[63345]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 620.535210] env[63345]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 620.535210] env[63345]: ERROR nova.compute.manager self.force_reraise() [ 620.535210] env[63345]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 620.535210] env[63345]: ERROR nova.compute.manager raise self.value [ 620.535210] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 620.535210] env[63345]: ERROR nova.compute.manager updated_port = self._update_port( [ 620.535210] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 620.535210] env[63345]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 620.535715] env[63345]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 620.535715] env[63345]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 620.535715] env[63345]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 3d199751-8956-48ca-b152-f05509099c33, please check neutron logs for more information. 
[ 620.535715] env[63345]: ERROR nova.compute.manager [ 620.535715] env[63345]: Traceback (most recent call last): [ 620.535715] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 620.535715] env[63345]: listener.cb(fileno) [ 620.535715] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 620.535715] env[63345]: result = function(*args, **kwargs) [ 620.535715] env[63345]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 620.535715] env[63345]: return func(*args, **kwargs) [ 620.535715] env[63345]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 620.535715] env[63345]: raise e [ 620.535715] env[63345]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 620.535715] env[63345]: nwinfo = self.network_api.allocate_for_instance( [ 620.535715] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 620.535715] env[63345]: created_port_ids = self._update_ports_for_instance( [ 620.535715] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 620.535715] env[63345]: with excutils.save_and_reraise_exception(): [ 620.535715] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 620.535715] env[63345]: self.force_reraise() [ 620.535715] env[63345]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 620.535715] env[63345]: raise self.value [ 620.535715] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 620.535715] env[63345]: updated_port = self._update_port( [ 620.535715] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 620.535715] env[63345]: _ensure_no_port_binding_failure(port) [ 620.535715] env[63345]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 620.535715] env[63345]: raise exception.PortBindingFailed(port_id=port['id']) [ 620.536442] env[63345]: nova.exception.PortBindingFailed: Binding failed for port 3d199751-8956-48ca-b152-f05509099c33, please check neutron logs for more information. [ 620.536442] env[63345]: Removing descriptor: 16 [ 620.539751] env[63345]: DEBUG nova.network.neutron [-] [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 620.671654] env[63345]: DEBUG nova.compute.manager [None req-a2c11686-f3e2-427b-a111-6c510c529d42 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] Start spawning the instance on the hypervisor. 
{{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 620.712351] env[63345]: DEBUG nova.virt.hardware [None req-a2c11686-f3e2-427b-a111-6c510c529d42 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 620.712677] env[63345]: DEBUG nova.virt.hardware [None req-a2c11686-f3e2-427b-a111-6c510c529d42 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 620.713086] env[63345]: DEBUG nova.virt.hardware [None req-a2c11686-f3e2-427b-a111-6c510c529d42 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 620.713086] env[63345]: DEBUG nova.virt.hardware [None req-a2c11686-f3e2-427b-a111-6c510c529d42 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 620.713297] env[63345]: DEBUG nova.virt.hardware [None req-a2c11686-f3e2-427b-a111-6c510c529d42 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 620.713595] env[63345]: DEBUG nova.virt.hardware [None req-a2c11686-f3e2-427b-a111-6c510c529d42 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 620.716838] env[63345]: DEBUG nova.virt.hardware [None req-a2c11686-f3e2-427b-a111-6c510c529d42 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 620.717120] env[63345]: DEBUG nova.virt.hardware [None req-a2c11686-f3e2-427b-a111-6c510c529d42 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 
620.717369] env[63345]: DEBUG nova.virt.hardware [None req-a2c11686-f3e2-427b-a111-6c510c529d42 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 620.717581] env[63345]: DEBUG nova.virt.hardware [None req-a2c11686-f3e2-427b-a111-6c510c529d42 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 620.718026] env[63345]: DEBUG nova.virt.hardware [None req-a2c11686-f3e2-427b-a111-6c510c529d42 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 620.718956] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b672ee4-3e32-4234-856a-3351b443d0f0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.728171] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00e41407-0a40-4f48-964a-eac82ac3b6e8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.750633] env[63345]: ERROR nova.compute.manager [None req-a2c11686-f3e2-427b-a111-6c510c529d42 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 3d199751-8956-48ca-b152-f05509099c33, please check neutron logs for more information. 
[ 620.750633] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] Traceback (most recent call last): [ 620.750633] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 620.750633] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] yield resources [ 620.750633] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 620.750633] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] self.driver.spawn(context, instance, image_meta, [ 620.750633] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 620.750633] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 620.750633] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 620.750633] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] vm_ref = self.build_virtual_machine(instance, [ 620.750633] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 620.751031] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] vif_infos = vmwarevif.get_vif_info(self._session, [ 620.751031] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 620.751031] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] for vif in network_info: [ 620.751031] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 620.751031] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] return self._sync_wrapper(fn, *args, **kwargs) [ 620.751031] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 620.751031] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] self.wait() [ 620.751031] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 620.751031] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] self[:] = self._gt.wait() [ 620.751031] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 620.751031] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] return self._exit_event.wait() [ 620.751031] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 620.751031] env[63345]: ERROR 
nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] current.throw(*self._exc) [ 620.752465] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 620.752465] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] result = function(*args, **kwargs) [ 620.752465] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 620.752465] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] return func(*args, **kwargs) [ 620.752465] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 620.752465] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] raise e [ 620.752465] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 620.752465] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] nwinfo = self.network_api.allocate_for_instance( [ 620.752465] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 620.752465] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] created_port_ids = self._update_ports_for_instance( [ 620.752465] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 620.752465] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] with excutils.save_and_reraise_exception(): [ 620.752465] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 620.752818] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] self.force_reraise() [ 620.752818] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 620.752818] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] raise self.value [ 620.752818] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 620.752818] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] updated_port = self._update_port( [ 620.752818] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 620.752818] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] _ensure_no_port_binding_failure(port) [ 620.752818] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
620.752818] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] raise exception.PortBindingFailed(port_id=port['id']) [ 620.752818] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] nova.exception.PortBindingFailed: Binding failed for port 3d199751-8956-48ca-b152-f05509099c33, please check neutron logs for more information. [ 620.752818] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] [ 620.752818] env[63345]: INFO nova.compute.manager [None req-a2c11686-f3e2-427b-a111-6c510c529d42 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] Terminating instance [ 620.780839] env[63345]: DEBUG nova.scheduler.client.report [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 620.797298] env[63345]: DEBUG nova.compute.manager [req-d8abf6a5-124a-48b5-8215-312dbfd5e974 req-6c8ae8d9-7309-47b6-ad49-e609a47cf06e service nova] [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] Received event network-vif-deleted-5013d56d-b041-4790-9c2f-7f0c6d71ec30 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 621.045920] env[63345]: DEBUG nova.network.neutron [-] [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 621.254323] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a2c11686-f3e2-427b-a111-6c510c529d42 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] Acquiring lock "refresh_cache-56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 621.254639] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a2c11686-f3e2-427b-a111-6c510c529d42 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] Acquired lock "refresh_cache-56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 621.254705] env[63345]: DEBUG nova.network.neutron [None req-a2c11686-f3e2-427b-a111-6c510c529d42 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 621.286707] env[63345]: DEBUG oslo_concurrency.lockutils [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.638s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 621.287280] env[63345]: DEBUG nova.compute.manager [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: ee31689b-bf0b-4737-86c7-5451c763e603] Start building networks asynchronously for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 621.289947] env[63345]: DEBUG oslo_concurrency.lockutils [None req-994a51d0-00b8-426a-996a-32235561cbcb tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 19.826s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 621.548646] env[63345]: INFO nova.compute.manager [-] [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] Took 1.04 seconds to deallocate network for instance. [ 621.551556] env[63345]: DEBUG nova.compute.claims [None req-40c46a23-51d1-4974-99d7-63d301a98173 tempest-ServerActionsTestJSON-1881851479 tempest-ServerActionsTestJSON-1881851479-project-member] [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] Aborting claim: {{(pid=63345) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 621.551755] env[63345]: DEBUG oslo_concurrency.lockutils [None req-40c46a23-51d1-4974-99d7-63d301a98173 tempest-ServerActionsTestJSON-1881851479 tempest-ServerActionsTestJSON-1881851479-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 621.788832] env[63345]: DEBUG nova.network.neutron [None req-a2c11686-f3e2-427b-a111-6c510c529d42 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 621.794148] env[63345]: DEBUG nova.compute.utils [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 621.798311] env[63345]: DEBUG nova.compute.manager [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: ee31689b-bf0b-4737-86c7-5451c763e603] Allocating IP information in the background. 
{{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 621.798504] env[63345]: DEBUG nova.network.neutron [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: ee31689b-bf0b-4737-86c7-5451c763e603] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 621.856363] env[63345]: DEBUG nova.policy [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '40ee4048c6d843308be28da1a5906015', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '572be07120b44a488924d794c7db100c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 622.035032] env[63345]: DEBUG nova.network.neutron [None req-a2c11686-f3e2-427b-a111-6c510c529d42 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 622.146243] env[63345]: DEBUG nova.network.neutron [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: ee31689b-bf0b-4737-86c7-5451c763e603] Successfully created port: 8dd82e3c-9961-493e-82fc-5ccb8542af34 {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 622.267634] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d228ecb0-056c-4d4d-86f9-2f1e2d2efb31 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.275452] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3112b26-7064-4e72-a772-3cfb52da928b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.311143] env[63345]: DEBUG nova.compute.manager [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: ee31689b-bf0b-4737-86c7-5451c763e603] Start building block device mappings for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 622.315235] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01028d43-e6f7-477f-883e-7d84e57b7b49 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.327662] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca69a15f-8407-48af-bebd-3fef0be5fbd9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.344597] env[63345]: DEBUG nova.compute.provider_tree [None req-994a51d0-00b8-426a-996a-32235561cbcb tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 188, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 622.543945] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a2c11686-f3e2-427b-a111-6c510c529d42 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] Releasing lock "refresh_cache-56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 622.544365] env[63345]: DEBUG nova.compute.manager [None req-a2c11686-f3e2-427b-a111-6c510c529d42 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] Start destroying the instance on the hypervisor. {{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 622.544578] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a2c11686-f3e2-427b-a111-6c510c529d42 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 622.544912] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-27910b47-7588-4d6e-87ea-1dd68adec58f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.555546] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2c8f455-6f6c-4327-b630-bc4f9fa769c2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.580575] env[63345]: WARNING nova.virt.vmwareapi.vmops [None req-a2c11686-f3e2-427b-a111-6c510c529d42 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd could not be found. 
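The WARNING just above shows the destroy path tolerating an instance that never materialized on the vCenter backend: InstanceNotFound is logged and the teardown continues (the next entries report "Instance destroyed" and network deallocation). A rough sketch of that tolerate-missing pattern follows; find_vm_by_uuid and delete_vm are hypothetical stand-ins for the vCenter lookups (SearchIndex.FindAllByUuid etc.) visible in the log, not the actual vmops code.

    # Sketch of the tolerant-destroy flow the surrounding entries describe.
    import logging

    LOG = logging.getLogger(__name__)

    class InstanceNotFound(Exception):
        pass

    def destroy(instance_uuid, find_vm_by_uuid, delete_vm):
        try:
            vm_ref = find_vm_by_uuid(instance_uuid)
            if vm_ref is None:
                raise InstanceNotFound(instance_uuid)
            delete_vm(vm_ref)
        except InstanceNotFound:
            # Nothing to tear down on the hypervisor; treat the destroy as
            # done so network and claim cleanup can still proceed.
            LOG.warning("Instance does not exist on backend: %s",
                        instance_uuid)
        LOG.debug("Instance destroyed")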
[ 622.580718] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a2c11686-f3e2-427b-a111-6c510c529d42 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 622.580907] env[63345]: INFO nova.compute.manager [None req-a2c11686-f3e2-427b-a111-6c510c529d42 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] Took 0.04 seconds to destroy the instance on the hypervisor. [ 622.581175] env[63345]: DEBUG oslo.service.loopingcall [None req-a2c11686-f3e2-427b-a111-6c510c529d42 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 622.581378] env[63345]: DEBUG nova.compute.manager [-] [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 622.581588] env[63345]: DEBUG nova.network.neutron [-] [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 622.615377] env[63345]: DEBUG nova.network.neutron [-] [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 622.869310] env[63345]: ERROR nova.scheduler.client.report [None req-994a51d0-00b8-426a-996a-32235561cbcb tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] [req-e63b2168-ea8b-421d-9072-afbf9f9d515e] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 188, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID fc35ddde-c15e-4ab8-bf77-a06ae0805b57. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-e63b2168-ea8b-421d-9072-afbf9f9d515e"}]}: nova.exception.PortBindingFailed: Binding failed for port bb0b3f3b-ae46-452c-a6d5-2f2f581a8e08, please check neutron logs for more information. 
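The 409 "resource provider generation conflict" above, followed by the "Refreshing inventories" entries that come next, reflects Placement's generation-based optimistic concurrency: the writer sends the provider generation it last saw, and on placement.concurrent_update it re-reads the generation and retries. The sketch below is illustrative only (not Nova's scheduler report client); PLACEMENT_URL and the requests session are assumptions.

    # Illustrative generation-refresh-and-retry loop against the Placement
    # inventories endpoint, mirroring the 409 handling seen in the log.
    import requests

    PLACEMENT_URL = "http://placement.example/resource_providers"

    def set_inventory(session, rp_uuid, inventories, retries=3):
        for _ in range(retries):
            current = session.get(
                f"{PLACEMENT_URL}/{rp_uuid}/inventories").json()
            payload = {
                "resource_provider_generation":
                    current["resource_provider_generation"],
                "inventories": inventories,
            }
            resp = session.put(
                f"{PLACEMENT_URL}/{rp_uuid}/inventories", json=payload)
            if resp.status_code != 409:
                resp.raise_for_status()
                return resp.json()
            # 409 placement.concurrent_update: another writer bumped the
            # provider generation; loop to re-read it and try again.
        raise RuntimeError("inventory update kept conflicting for %s" % rp_uuid)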
[ 622.889410] env[63345]: DEBUG nova.scheduler.client.report [None req-994a51d0-00b8-426a-996a-32235561cbcb tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Refreshing inventories for resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 622.907297] env[63345]: DEBUG nova.scheduler.client.report [None req-994a51d0-00b8-426a-996a-32235561cbcb tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Updating ProviderTree inventory for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 622.907536] env[63345]: DEBUG nova.compute.provider_tree [None req-994a51d0-00b8-426a-996a-32235561cbcb tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 622.919852] env[63345]: DEBUG nova.scheduler.client.report [None req-994a51d0-00b8-426a-996a-32235561cbcb tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Refreshing aggregate associations for resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57, aggregates: None {{(pid=63345) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 622.941124] env[63345]: DEBUG nova.scheduler.client.report [None req-994a51d0-00b8-426a-996a-32235561cbcb tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Refreshing trait associations for resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=63345) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 622.993892] env[63345]: DEBUG nova.compute.manager [req-e2a0bc04-65b1-41bd-b864-30e8627f9bf8 req-fbadafa9-9a36-42d5-8f57-ead85c4b97a3 service nova] [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] Received event network-changed-3d199751-8956-48ca-b152-f05509099c33 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 622.994029] env[63345]: DEBUG nova.compute.manager [req-e2a0bc04-65b1-41bd-b864-30e8627f9bf8 req-fbadafa9-9a36-42d5-8f57-ead85c4b97a3 service nova] [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] Refreshing instance network info cache due to event network-changed-3d199751-8956-48ca-b152-f05509099c33. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 622.994240] env[63345]: DEBUG oslo_concurrency.lockutils [req-e2a0bc04-65b1-41bd-b864-30e8627f9bf8 req-fbadafa9-9a36-42d5-8f57-ead85c4b97a3 service nova] Acquiring lock "refresh_cache-56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 622.994424] env[63345]: DEBUG oslo_concurrency.lockutils [req-e2a0bc04-65b1-41bd-b864-30e8627f9bf8 req-fbadafa9-9a36-42d5-8f57-ead85c4b97a3 service nova] Acquired lock "refresh_cache-56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 622.994583] env[63345]: DEBUG nova.network.neutron [req-e2a0bc04-65b1-41bd-b864-30e8627f9bf8 req-fbadafa9-9a36-42d5-8f57-ead85c4b97a3 service nova] [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] Refreshing network info cache for port 3d199751-8956-48ca-b152-f05509099c33 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 623.117940] env[63345]: DEBUG nova.network.neutron [-] [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 623.329271] env[63345]: DEBUG nova.compute.manager [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: ee31689b-bf0b-4737-86c7-5451c763e603] Start spawning the instance on the hypervisor. {{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 623.351364] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3201bf1c-686c-4bea-be35-8583ea3df080 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.356025] env[63345]: DEBUG nova.virt.hardware [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 623.356260] env[63345]: DEBUG nova.virt.hardware [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 623.356420] env[63345]: DEBUG nova.virt.hardware [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf 
tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 623.356601] env[63345]: DEBUG nova.virt.hardware [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 623.356748] env[63345]: DEBUG nova.virt.hardware [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 623.356894] env[63345]: DEBUG nova.virt.hardware [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 623.357114] env[63345]: DEBUG nova.virt.hardware [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 623.357280] env[63345]: DEBUG nova.virt.hardware [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 623.357450] env[63345]: DEBUG nova.virt.hardware [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 623.357614] env[63345]: DEBUG nova.virt.hardware [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 623.357781] env[63345]: DEBUG nova.virt.hardware [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 623.358561] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c9641e1-36ba-4886-ac84-b12fce403347 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.367992] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1e22232-c863-4e16-8c37-36310f2d8314 {{(pid=63345) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.372475] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39d32612-97fb-484a-9f16-b13f0fcdf51c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.411008] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82bfb71e-7469-4408-998a-52b1a43664df {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.419328] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccbfd466-7d57-4bf6-9a24-a17c30747a1b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.433307] env[63345]: DEBUG nova.compute.provider_tree [None req-994a51d0-00b8-426a-996a-32235561cbcb tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 188, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 623.435895] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 623.436068] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 623.518966] env[63345]: DEBUG nova.network.neutron [req-e2a0bc04-65b1-41bd-b864-30e8627f9bf8 req-fbadafa9-9a36-42d5-8f57-ead85c4b97a3 service nova] [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 623.623636] env[63345]: INFO nova.compute.manager [-] [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] Took 1.04 seconds to deallocate network for instance. 
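The "compute_resources" Acquiring/acquired/released lines around the claim abort come from oslo.concurrency's synchronized wrapper: instance claims and aborts serialize on one named lock, which is why the log reports waited/held timings per caller. A minimal sketch of that locking pattern, using oslo_concurrency directly; the ResourceTracker methods here are placeholders, not Nova's actual implementation.

    # Minimal sketch of the named-lock pattern behind the lockutils entries.
    from oslo_concurrency import lockutils

    class ResourceTracker(object):

        @lockutils.synchronized("compute_resources")
        def instance_claim(self, instance):
            # Reserve CPU/RAM/disk for a build under the lock so concurrent
            # claims and aborts see a consistent view of host resources.
            pass

        @lockutils.synchronized("compute_resources")
        def abort_instance_claim(self, instance):
            # Return reserved resources after a failed build, e.g. the
            # PortBindingFailed spawns logged above.
            pass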
[ 623.625144] env[63345]: DEBUG nova.compute.claims [None req-a2c11686-f3e2-427b-a111-6c510c529d42 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] Aborting claim: {{(pid=63345) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 623.625144] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a2c11686-f3e2-427b-a111-6c510c529d42 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 623.627573] env[63345]: DEBUG nova.network.neutron [req-e2a0bc04-65b1-41bd-b864-30e8627f9bf8 req-fbadafa9-9a36-42d5-8f57-ead85c4b97a3 service nova] [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 623.719362] env[63345]: DEBUG nova.network.neutron [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: ee31689b-bf0b-4737-86c7-5451c763e603] Successfully updated port: 8dd82e3c-9961-493e-82fc-5ccb8542af34 {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 623.955571] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 623.955751] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Starting heal instance info cache {{(pid=63345) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10257}} [ 623.955872] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Rebuilding the list of instances to heal {{(pid=63345) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10261}} [ 623.978852] env[63345]: DEBUG nova.scheduler.client.report [None req-994a51d0-00b8-426a-996a-32235561cbcb tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Updated inventory for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with generation 57 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 188, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 623.979111] env[63345]: DEBUG nova.compute.provider_tree [None req-994a51d0-00b8-426a-996a-32235561cbcb tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Updating resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 generation from 57 to 58 during operation: update_inventory {{(pid=63345) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 623.979319] env[63345]: DEBUG nova.compute.provider_tree [None 
req-994a51d0-00b8-426a-996a-32235561cbcb tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 188, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 624.130157] env[63345]: DEBUG oslo_concurrency.lockutils [req-e2a0bc04-65b1-41bd-b864-30e8627f9bf8 req-fbadafa9-9a36-42d5-8f57-ead85c4b97a3 service nova] Releasing lock "refresh_cache-56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 624.130157] env[63345]: DEBUG nova.compute.manager [req-e2a0bc04-65b1-41bd-b864-30e8627f9bf8 req-fbadafa9-9a36-42d5-8f57-ead85c4b97a3 service nova] [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] Received event network-vif-deleted-3d199751-8956-48ca-b152-f05509099c33 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 624.222519] env[63345]: DEBUG oslo_concurrency.lockutils [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Acquiring lock "refresh_cache-ee31689b-bf0b-4737-86c7-5451c763e603" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 624.223264] env[63345]: DEBUG oslo_concurrency.lockutils [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Acquired lock "refresh_cache-ee31689b-bf0b-4737-86c7-5451c763e603" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 624.223264] env[63345]: DEBUG nova.network.neutron [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: ee31689b-bf0b-4737-86c7-5451c763e603] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 624.463017] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] Skipping network cache update for instance because it is Building. {{(pid=63345) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10270}} [ 624.463017] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] Skipping network cache update for instance because it is Building. {{(pid=63345) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10270}} [ 624.463017] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] Skipping network cache update for instance because it is Building. 
{{(pid=63345) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10270}} [ 624.463017] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] Skipping network cache update for instance because it is Building. {{(pid=63345) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10270}} [ 624.463017] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] Skipping network cache update for instance because it is Building. {{(pid=63345) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10270}} [ 624.463017] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] Skipping network cache update for instance because it is Building. {{(pid=63345) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10270}} [ 624.463414] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: ee31689b-bf0b-4737-86c7-5451c763e603] Skipping network cache update for instance because it is Building. {{(pid=63345) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10270}} [ 624.463414] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Didn't find any instances for network info cache update. {{(pid=63345) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10343}} [ 624.463414] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 624.463414] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 624.463414] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 624.463414] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 624.463560] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 624.463560] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 624.463560] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63345) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10876}} [ 624.463560] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager.update_available_resource {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 624.489019] env[63345]: DEBUG oslo_concurrency.lockutils [None req-994a51d0-00b8-426a-996a-32235561cbcb tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 3.196s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 624.489019] env[63345]: ERROR nova.compute.manager [None req-994a51d0-00b8-426a-996a-32235561cbcb tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port bb0b3f3b-ae46-452c-a6d5-2f2f581a8e08, please check neutron logs for more information. [ 624.489019] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] Traceback (most recent call last): [ 624.489019] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 624.489019] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] self.driver.spawn(context, instance, image_meta, [ 624.489019] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 624.489019] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] self._vmops.spawn(context, instance, image_meta, injected_files, [ 624.489019] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 624.489019] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] vm_ref = self.build_virtual_machine(instance, [ 624.489333] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 624.489333] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] vif_infos = vmwarevif.get_vif_info(self._session, [ 624.489333] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 624.489333] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] for vif in network_info: [ 624.489333] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 624.489333] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] return self._sync_wrapper(fn, *args, **kwargs) [ 624.489333] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 624.489333] 
env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] self.wait() [ 624.489333] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 624.489333] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] self[:] = self._gt.wait() [ 624.489333] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 624.489333] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] return self._exit_event.wait() [ 624.489333] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 624.489620] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] result = hub.switch() [ 624.489620] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 624.489620] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] return self.greenlet.switch() [ 624.489620] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 624.489620] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] result = function(*args, **kwargs) [ 624.489620] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 624.489620] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] return func(*args, **kwargs) [ 624.489620] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 624.489620] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] raise e [ 624.489620] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 624.489620] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] nwinfo = self.network_api.allocate_for_instance( [ 624.489620] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 624.489620] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] created_port_ids = self._update_ports_for_instance( [ 624.489910] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 624.489910] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] with excutils.save_and_reraise_exception(): [ 624.489910] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 624.489910] 
env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] self.force_reraise() [ 624.489910] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 624.489910] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] raise self.value [ 624.489910] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 624.489910] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] updated_port = self._update_port( [ 624.489910] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 624.489910] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] _ensure_no_port_binding_failure(port) [ 624.489910] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 624.489910] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] raise exception.PortBindingFailed(port_id=port['id']) [ 624.490194] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] nova.exception.PortBindingFailed: Binding failed for port bb0b3f3b-ae46-452c-a6d5-2f2f581a8e08, please check neutron logs for more information. [ 624.490194] env[63345]: ERROR nova.compute.manager [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] [ 624.490194] env[63345]: DEBUG nova.compute.utils [None req-994a51d0-00b8-426a-996a-32235561cbcb tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] Binding failed for port bb0b3f3b-ae46-452c-a6d5-2f2f581a8e08, please check neutron logs for more information. {{(pid=63345) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 624.490194] env[63345]: DEBUG oslo_concurrency.lockutils [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.986s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 624.494159] env[63345]: INFO nova.compute.claims [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: 28caa5f5-141a-4ef9-abb3-33a1973d99cf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 624.494557] env[63345]: DEBUG nova.compute.manager [None req-994a51d0-00b8-426a-996a-32235561cbcb tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] Build of instance 1a54db9b-0482-4038-a505-46447f0c33ef was re-scheduled: Binding failed for port bb0b3f3b-ae46-452c-a6d5-2f2f581a8e08, please check neutron logs for more information. 
{{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 624.495146] env[63345]: DEBUG nova.compute.manager [None req-994a51d0-00b8-426a-996a-32235561cbcb tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] Unplugging VIFs for instance {{(pid=63345) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 624.495507] env[63345]: DEBUG oslo_concurrency.lockutils [None req-994a51d0-00b8-426a-996a-32235561cbcb tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Acquiring lock "refresh_cache-1a54db9b-0482-4038-a505-46447f0c33ef" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 624.495788] env[63345]: DEBUG oslo_concurrency.lockutils [None req-994a51d0-00b8-426a-996a-32235561cbcb tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Acquired lock "refresh_cache-1a54db9b-0482-4038-a505-46447f0c33ef" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 624.496086] env[63345]: DEBUG nova.network.neutron [None req-994a51d0-00b8-426a-996a-32235561cbcb tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 624.762754] env[63345]: DEBUG nova.network.neutron [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: ee31689b-bf0b-4737-86c7-5451c763e603] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 624.945585] env[63345]: DEBUG nova.network.neutron [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: ee31689b-bf0b-4737-86c7-5451c763e603] Updating instance_info_cache with network_info: [{"id": "8dd82e3c-9961-493e-82fc-5ccb8542af34", "address": "fa:16:3e:c2:61:f0", "network": {"id": "e1d0fcb4-77e3-4c6a-96ea-178593ec00ef", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1852131446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "572be07120b44a488924d794c7db100c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae70d41-6ebf-472a-8504-6530eb37ea41", "external-id": "nsx-vlan-transportzone-576", "segmentation_id": 576, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8dd82e3c-99", "ovs_interfaceid": "8dd82e3c-9961-493e-82fc-5ccb8542af34", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 624.966532] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 625.025528] env[63345]: DEBUG nova.network.neutron [None req-994a51d0-00b8-426a-996a-32235561cbcb tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 625.057525] env[63345]: DEBUG nova.compute.manager [req-ead9a1f7-49ca-40d2-b8f4-4820493e8871 req-60b88d4f-0c7f-4e5e-af68-4d7e25e94466 service nova] [instance: ee31689b-bf0b-4737-86c7-5451c763e603] Received event network-vif-plugged-8dd82e3c-9961-493e-82fc-5ccb8542af34 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 625.057761] env[63345]: DEBUG oslo_concurrency.lockutils [req-ead9a1f7-49ca-40d2-b8f4-4820493e8871 req-60b88d4f-0c7f-4e5e-af68-4d7e25e94466 service nova] Acquiring lock "ee31689b-bf0b-4737-86c7-5451c763e603-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 625.057990] env[63345]: DEBUG oslo_concurrency.lockutils [req-ead9a1f7-49ca-40d2-b8f4-4820493e8871 req-60b88d4f-0c7f-4e5e-af68-4d7e25e94466 service nova] Lock "ee31689b-bf0b-4737-86c7-5451c763e603-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 625.058152] env[63345]: DEBUG oslo_concurrency.lockutils [req-ead9a1f7-49ca-40d2-b8f4-4820493e8871 req-60b88d4f-0c7f-4e5e-af68-4d7e25e94466 service nova] Lock "ee31689b-bf0b-4737-86c7-5451c763e603-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 625.058321] env[63345]: DEBUG nova.compute.manager [req-ead9a1f7-49ca-40d2-b8f4-4820493e8871 req-60b88d4f-0c7f-4e5e-af68-4d7e25e94466 service nova] [instance: ee31689b-bf0b-4737-86c7-5451c763e603] No waiting events found dispatching network-vif-plugged-8dd82e3c-9961-493e-82fc-5ccb8542af34 {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 625.058486] env[63345]: WARNING nova.compute.manager [req-ead9a1f7-49ca-40d2-b8f4-4820493e8871 req-60b88d4f-0c7f-4e5e-af68-4d7e25e94466 service nova] [instance: ee31689b-bf0b-4737-86c7-5451c763e603] Received unexpected event network-vif-plugged-8dd82e3c-9961-493e-82fc-5ccb8542af34 for instance with vm_state building and task_state spawning. [ 625.058654] env[63345]: DEBUG nova.compute.manager [req-ead9a1f7-49ca-40d2-b8f4-4820493e8871 req-60b88d4f-0c7f-4e5e-af68-4d7e25e94466 service nova] [instance: ee31689b-bf0b-4737-86c7-5451c763e603] Received event network-changed-8dd82e3c-9961-493e-82fc-5ccb8542af34 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 625.058847] env[63345]: DEBUG nova.compute.manager [req-ead9a1f7-49ca-40d2-b8f4-4820493e8871 req-60b88d4f-0c7f-4e5e-af68-4d7e25e94466 service nova] [instance: ee31689b-bf0b-4737-86c7-5451c763e603] Refreshing instance network info cache due to event network-changed-8dd82e3c-9961-493e-82fc-5ccb8542af34. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 625.059304] env[63345]: DEBUG oslo_concurrency.lockutils [req-ead9a1f7-49ca-40d2-b8f4-4820493e8871 req-60b88d4f-0c7f-4e5e-af68-4d7e25e94466 service nova] Acquiring lock "refresh_cache-ee31689b-bf0b-4737-86c7-5451c763e603" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 625.073582] env[63345]: DEBUG nova.network.neutron [None req-994a51d0-00b8-426a-996a-32235561cbcb tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 625.447106] env[63345]: DEBUG oslo_concurrency.lockutils [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Releasing lock "refresh_cache-ee31689b-bf0b-4737-86c7-5451c763e603" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 625.447469] env[63345]: DEBUG nova.compute.manager [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: ee31689b-bf0b-4737-86c7-5451c763e603] Instance network_info: |[{"id": "8dd82e3c-9961-493e-82fc-5ccb8542af34", "address": "fa:16:3e:c2:61:f0", "network": {"id": "e1d0fcb4-77e3-4c6a-96ea-178593ec00ef", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1852131446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "572be07120b44a488924d794c7db100c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae70d41-6ebf-472a-8504-6530eb37ea41", "external-id": "nsx-vlan-transportzone-576", "segmentation_id": 576, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8dd82e3c-99", "ovs_interfaceid": "8dd82e3c-9961-493e-82fc-5ccb8542af34", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 625.447809] env[63345]: DEBUG oslo_concurrency.lockutils [req-ead9a1f7-49ca-40d2-b8f4-4820493e8871 req-60b88d4f-0c7f-4e5e-af68-4d7e25e94466 service nova] Acquired lock "refresh_cache-ee31689b-bf0b-4737-86c7-5451c763e603" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 625.447990] env[63345]: DEBUG nova.network.neutron [req-ead9a1f7-49ca-40d2-b8f4-4820493e8871 req-60b88d4f-0c7f-4e5e-af68-4d7e25e94466 service nova] [instance: ee31689b-bf0b-4737-86c7-5451c763e603] Refreshing network info cache for port 8dd82e3c-9961-493e-82fc-5ccb8542af34 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 625.449359] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf 
tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: ee31689b-bf0b-4737-86c7-5451c763e603] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c2:61:f0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cae70d41-6ebf-472a-8504-6530eb37ea41', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8dd82e3c-9961-493e-82fc-5ccb8542af34', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 625.466342] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Creating folder: Project (572be07120b44a488924d794c7db100c). Parent ref: group-v225918. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 625.470163] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5a6ead07-5cab-4cce-a1fb-6ac6546be1cf {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.482646] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Created folder: Project (572be07120b44a488924d794c7db100c) in parent group-v225918. [ 625.482849] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Creating folder: Instances. Parent ref: group-v225938. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 625.483091] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-90f320d7-690c-4826-bedb-3031d9696df7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.493700] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Created folder: Instances in parent group-v225938. [ 625.493947] env[63345]: DEBUG oslo.service.loopingcall [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 625.494160] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ee31689b-bf0b-4737-86c7-5451c763e603] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 625.494365] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-99ebfdcc-c84f-4969-b25b-50320528556c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.522057] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 625.522057] env[63345]: value = "task-1016688" [ 625.522057] env[63345]: _type = "Task" [ 625.522057] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 625.534162] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016688, 'name': CreateVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.577422] env[63345]: DEBUG oslo_concurrency.lockutils [None req-994a51d0-00b8-426a-996a-32235561cbcb tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Releasing lock "refresh_cache-1a54db9b-0482-4038-a505-46447f0c33ef" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 625.577422] env[63345]: DEBUG nova.compute.manager [None req-994a51d0-00b8-426a-996a-32235561cbcb tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=63345) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 625.577422] env[63345]: DEBUG nova.compute.manager [None req-994a51d0-00b8-426a-996a-32235561cbcb tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 625.577422] env[63345]: DEBUG nova.network.neutron [None req-994a51d0-00b8-426a-996a-32235561cbcb tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 625.602397] env[63345]: DEBUG nova.network.neutron [None req-994a51d0-00b8-426a-996a-32235561cbcb tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 625.980516] env[63345]: DEBUG nova.network.neutron [req-ead9a1f7-49ca-40d2-b8f4-4820493e8871 req-60b88d4f-0c7f-4e5e-af68-4d7e25e94466 service nova] [instance: ee31689b-bf0b-4737-86c7-5451c763e603] Updated VIF entry in instance network info cache for port 8dd82e3c-9961-493e-82fc-5ccb8542af34. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 625.980921] env[63345]: DEBUG nova.network.neutron [req-ead9a1f7-49ca-40d2-b8f4-4820493e8871 req-60b88d4f-0c7f-4e5e-af68-4d7e25e94466 service nova] [instance: ee31689b-bf0b-4737-86c7-5451c763e603] Updating instance_info_cache with network_info: [{"id": "8dd82e3c-9961-493e-82fc-5ccb8542af34", "address": "fa:16:3e:c2:61:f0", "network": {"id": "e1d0fcb4-77e3-4c6a-96ea-178593ec00ef", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1852131446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "572be07120b44a488924d794c7db100c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae70d41-6ebf-472a-8504-6530eb37ea41", "external-id": "nsx-vlan-transportzone-576", "segmentation_id": 576, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8dd82e3c-99", "ovs_interfaceid": "8dd82e3c-9961-493e-82fc-5ccb8542af34", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 626.004353] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6408a537-8f30-462b-b91b-815528bd849e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.014189] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-182fa857-43f1-4d88-9692-558c9958a93c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.054076] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28de5044-75a6-4076-9a05-be99cc970faa {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.059704] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016688, 'name': CreateVM_Task, 'duration_secs': 0.366926} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 626.060524] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ee31689b-bf0b-4737-86c7-5451c763e603] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 626.065120] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f527150-a2e0-4c9c-8090-47875a7c730b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.071240] env[63345]: DEBUG oslo_concurrency.lockutils [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 626.071413] env[63345]: DEBUG oslo_concurrency.lockutils [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 626.071741] env[63345]: DEBUG oslo_concurrency.lockutils [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 626.071970] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3deea0dc-5dce-4a03-812d-6c9f32fa9fc6 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.084695] env[63345]: DEBUG nova.compute.provider_tree [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 626.088669] env[63345]: DEBUG oslo_vmware.api [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Waiting for the task: (returnval){ [ 626.088669] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52b69759-4da4-e02e-6f4f-c8b0bf82f930" [ 626.088669] env[63345]: _type = "Task" [ 626.088669] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.097734] env[63345]: DEBUG oslo_vmware.api [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52b69759-4da4-e02e-6f4f-c8b0bf82f930, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.104404] env[63345]: DEBUG nova.network.neutron [None req-994a51d0-00b8-426a-996a-32235561cbcb tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 626.397036] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d0b1cd44-5d00-4371-827d-17b8856bf22c tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] Acquiring lock "3b0d115d-dad5-4881-a0e0-b98f555da533" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 626.397036] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d0b1cd44-5d00-4371-827d-17b8856bf22c tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] Lock "3b0d115d-dad5-4881-a0e0-b98f555da533" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 626.485769] env[63345]: DEBUG oslo_concurrency.lockutils [req-ead9a1f7-49ca-40d2-b8f4-4820493e8871 req-60b88d4f-0c7f-4e5e-af68-4d7e25e94466 service nova] Releasing lock "refresh_cache-ee31689b-bf0b-4737-86c7-5451c763e603" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 626.587710] env[63345]: DEBUG nova.scheduler.client.report [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 188, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 626.601725] env[63345]: DEBUG oslo_vmware.api [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52b69759-4da4-e02e-6f4f-c8b0bf82f930, 'name': SearchDatastore_Task, 'duration_secs': 0.012653} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 626.602034] env[63345]: DEBUG oslo_concurrency.lockutils [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 626.602274] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: ee31689b-bf0b-4737-86c7-5451c763e603] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 626.602504] env[63345]: DEBUG oslo_concurrency.lockutils [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 626.602652] env[63345]: DEBUG oslo_concurrency.lockutils [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 626.602822] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 626.603083] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8fbcbca8-2922-4249-a83f-d30ac6c757c1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.607050] env[63345]: INFO nova.compute.manager [None req-994a51d0-00b8-426a-996a-32235561cbcb tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] [instance: 1a54db9b-0482-4038-a505-46447f0c33ef] Took 1.03 seconds to deallocate network for instance. [ 626.614495] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 626.618018] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 626.618018] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-48c8ed66-9ff3-441a-9b06-e66c857d0f15 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.636210] env[63345]: DEBUG oslo_vmware.api [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Waiting for the task: (returnval){ [ 626.636210] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]5222d0da-11fd-884f-e73b-9663fa9fd49b" [ 626.636210] env[63345]: _type = "Task" [ 626.636210] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.644741] env[63345]: DEBUG oslo_vmware.api [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5222d0da-11fd-884f-e73b-9663fa9fd49b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.096554] env[63345]: DEBUG oslo_concurrency.lockutils [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.608s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 627.097104] env[63345]: DEBUG nova.compute.manager [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: 28caa5f5-141a-4ef9-abb3-33a1973d99cf] Start building networks asynchronously for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 627.099667] env[63345]: DEBUG oslo_concurrency.lockutils [None req-230142a3-84f1-43a5-9eb9-4bad4e3f22a1 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 20.654s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 627.150945] env[63345]: DEBUG oslo_vmware.api [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5222d0da-11fd-884f-e73b-9663fa9fd49b, 'name': SearchDatastore_Task, 'duration_secs': 0.01041} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.151799] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f66f7c8e-db93-4486-999a-b4cf644b88ca {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.160358] env[63345]: DEBUG oslo_vmware.api [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Waiting for the task: (returnval){ [ 627.160358] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]5215fbd5-b645-32e3-a43f-628d14f6f01c" [ 627.160358] env[63345]: _type = "Task" [ 627.160358] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.172066] env[63345]: DEBUG oslo_vmware.api [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5215fbd5-b645-32e3-a43f-628d14f6f01c, 'name': SearchDatastore_Task, 'duration_secs': 0.009539} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.172343] env[63345]: DEBUG oslo_concurrency.lockutils [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 627.172629] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] ee31689b-bf0b-4737-86c7-5451c763e603/ee31689b-bf0b-4737-86c7-5451c763e603.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 627.172913] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5b28bf64-f478-4998-a451-ab90b3e95501 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.180412] env[63345]: DEBUG oslo_vmware.api [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Waiting for the task: (returnval){ [ 627.180412] env[63345]: value = "task-1016689" [ 627.180412] env[63345]: _type = "Task" [ 627.180412] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.189590] env[63345]: DEBUG oslo_vmware.api [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Task: {'id': task-1016689, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.605340] env[63345]: DEBUG nova.compute.utils [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 627.610365] env[63345]: DEBUG nova.compute.manager [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: 28caa5f5-141a-4ef9-abb3-33a1973d99cf] Allocating IP information in the background. {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 627.610612] env[63345]: DEBUG nova.network.neutron [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: 28caa5f5-141a-4ef9-abb3-33a1973d99cf] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 627.647735] env[63345]: INFO nova.scheduler.client.report [None req-994a51d0-00b8-426a-996a-32235561cbcb tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Deleted allocations for instance 1a54db9b-0482-4038-a505-46447f0c33ef [ 627.690486] env[63345]: DEBUG nova.policy [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '40ee4048c6d843308be28da1a5906015', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '572be07120b44a488924d794c7db100c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 627.696412] env[63345]: DEBUG oslo_vmware.api [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Task: {'id': task-1016689, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.111433] env[63345]: DEBUG nova.compute.manager [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: 28caa5f5-141a-4ef9-abb3-33a1973d99cf] Start building block device mappings for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 628.159917] env[63345]: DEBUG nova.network.neutron [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: 28caa5f5-141a-4ef9-abb3-33a1973d99cf] Successfully created port: 973be2f9-7f10-4bd3-996a-1ca7b92520c4 {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 628.162282] env[63345]: DEBUG oslo_concurrency.lockutils [None req-994a51d0-00b8-426a-996a-32235561cbcb tempest-ListImageFiltersTestJSON-2136310875 tempest-ListImageFiltersTestJSON-2136310875-project-member] Lock "1a54db9b-0482-4038-a505-46447f0c33ef" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 92.457s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 628.196111] env[63345]: DEBUG oslo_vmware.api [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Task: {'id': task-1016689, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.515881} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.196111] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] ee31689b-bf0b-4737-86c7-5451c763e603/ee31689b-bf0b-4737-86c7-5451c763e603.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 628.196450] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: ee31689b-bf0b-4737-86c7-5451c763e603] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 628.196773] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7b4b4eae-fdad-4e33-89d9-c80e04dbc43b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.206199] env[63345]: DEBUG oslo_vmware.api [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Waiting for the task: (returnval){ [ 628.206199] env[63345]: value = "task-1016690" [ 628.206199] env[63345]: _type = "Task" [ 628.206199] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.220721] env[63345]: DEBUG oslo_vmware.api [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Task: {'id': task-1016690, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.238974] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2811bff6-5f5f-49c4-a904-2b4a53d54000 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.246947] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5a613fb-2eb6-4779-981c-2e262f5e215c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.278487] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bd51a56-007c-4845-be79-14e74e3c9715 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.291832] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d369dfb6-7ea8-4123-9fff-6efffc5cb3ac {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.305410] env[63345]: DEBUG nova.compute.provider_tree [None req-230142a3-84f1-43a5-9eb9-4bad4e3f22a1 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 628.664752] env[63345]: DEBUG nova.compute.manager [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: bc9d2e6a-f77a-4a21-90bc-81949cbfce91] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 628.718193] env[63345]: DEBUG oslo_vmware.api [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Task: {'id': task-1016690, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077821} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.718480] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: ee31689b-bf0b-4737-86c7-5451c763e603] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 628.719351] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-034690b3-f184-4a90-be20-0a97d353d081 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.748247] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: ee31689b-bf0b-4737-86c7-5451c763e603] Reconfiguring VM instance instance-00000019 to attach disk [datastore2] ee31689b-bf0b-4737-86c7-5451c763e603/ee31689b-bf0b-4737-86c7-5451c763e603.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 628.748809] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1840577a-a6e3-43bf-89fd-19bbf409b2d3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.770989] env[63345]: DEBUG oslo_vmware.api [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Waiting for the task: (returnval){ [ 628.770989] env[63345]: value = "task-1016691" [ 628.770989] env[63345]: _type = "Task" [ 628.770989] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.782432] env[63345]: DEBUG oslo_vmware.api [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Task: {'id': task-1016691, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.830313] env[63345]: ERROR nova.scheduler.client.report [None req-230142a3-84f1-43a5-9eb9-4bad4e3f22a1 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [req-7efe156c-1551-43b0-bb30-b7da56a7f94c] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID fc35ddde-c15e-4ab8-bf77-a06ae0805b57. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-7efe156c-1551-43b0-bb30-b7da56a7f94c"}]}: nova.exception.PortBindingFailed: Binding failed for port 22d34ad5-6afb-4378-b16e-db51ac9d8c93, please check neutron logs for more information. 
[ 628.846587] env[63345]: DEBUG nova.scheduler.client.report [None req-230142a3-84f1-43a5-9eb9-4bad4e3f22a1 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Refreshing inventories for resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 628.869400] env[63345]: DEBUG nova.scheduler.client.report [None req-230142a3-84f1-43a5-9eb9-4bad4e3f22a1 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Updating ProviderTree inventory for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 188, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 628.869732] env[63345]: DEBUG nova.compute.provider_tree [None req-230142a3-84f1-43a5-9eb9-4bad4e3f22a1 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 188, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 628.886777] env[63345]: DEBUG nova.scheduler.client.report [None req-230142a3-84f1-43a5-9eb9-4bad4e3f22a1 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Refreshing aggregate associations for resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57, aggregates: None {{(pid=63345) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 628.912701] env[63345]: DEBUG nova.scheduler.client.report [None req-230142a3-84f1-43a5-9eb9-4bad4e3f22a1 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Refreshing trait associations for resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=63345) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 629.128514] env[63345]: DEBUG nova.compute.manager [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: 28caa5f5-141a-4ef9-abb3-33a1973d99cf] Start spawning the instance on the hypervisor. 
{{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 629.166554] env[63345]: DEBUG nova.virt.hardware [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 629.166795] env[63345]: DEBUG nova.virt.hardware [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 629.166957] env[63345]: DEBUG nova.virt.hardware [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 629.167165] env[63345]: DEBUG nova.virt.hardware [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 629.167328] env[63345]: DEBUG nova.virt.hardware [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 629.167499] env[63345]: DEBUG nova.virt.hardware [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 629.167709] env[63345]: DEBUG nova.virt.hardware [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 629.167875] env[63345]: DEBUG nova.virt.hardware [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 629.171280] env[63345]: DEBUG nova.virt.hardware [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 629.171773] env[63345]: DEBUG nova.virt.hardware [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 629.171773] env[63345]: DEBUG nova.virt.hardware [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 629.174931] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f61d4af6-efd3-4697-9ff7-99e95b287308 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.189803] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d733d65a-84dc-486f-9923-5cf711720d9f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.194728] env[63345]: DEBUG oslo_concurrency.lockutils [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 629.282924] env[63345]: DEBUG oslo_vmware.api [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Task: {'id': task-1016691, 'name': ReconfigVM_Task, 'duration_secs': 0.384995} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 629.283221] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: ee31689b-bf0b-4737-86c7-5451c763e603] Reconfigured VM instance instance-00000019 to attach disk [datastore2] ee31689b-bf0b-4737-86c7-5451c763e603/ee31689b-bf0b-4737-86c7-5451c763e603.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 629.283877] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0fea0bdd-29ff-49a8-a195-b57fbf8e5b12 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.291363] env[63345]: DEBUG oslo_vmware.api [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Waiting for the task: (returnval){ [ 629.291363] env[63345]: value = "task-1016692" [ 629.291363] env[63345]: _type = "Task" [ 629.291363] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 629.303377] env[63345]: DEBUG oslo_vmware.api [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Task: {'id': task-1016692, 'name': Rename_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.501769] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0371558c-ec38-4370-ad31-441ac5b51f1d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.513711] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be04a3ef-4d2f-4b20-a8eb-5a9d8e64c420 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.547580] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af114600-fa8c-4ed9-82f1-883d69503b2e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.555473] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27e7569f-6cb5-4486-ac8b-4f90def77450 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.572926] env[63345]: DEBUG nova.compute.provider_tree [None req-230142a3-84f1-43a5-9eb9-4bad4e3f22a1 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 
629.804478] env[63345]: DEBUG oslo_vmware.api [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Task: {'id': task-1016692, 'name': Rename_Task, 'duration_secs': 0.151813} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 629.806167] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: ee31689b-bf0b-4737-86c7-5451c763e603] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 629.806167] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b80e9d37-2520-4f8b-9b5c-40f97e37caa8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.816224] env[63345]: DEBUG oslo_vmware.api [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Waiting for the task: (returnval){ [ 629.816224] env[63345]: value = "task-1016693" [ 629.816224] env[63345]: _type = "Task" [ 629.816224] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 629.834272] env[63345]: DEBUG oslo_vmware.api [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Task: {'id': task-1016693, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.897688] env[63345]: DEBUG nova.network.neutron [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: 28caa5f5-141a-4ef9-abb3-33a1973d99cf] Successfully updated port: 973be2f9-7f10-4bd3-996a-1ca7b92520c4 {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 630.005703] env[63345]: DEBUG nova.compute.manager [req-940cfc32-7ed2-4ef2-a364-066a1d9e1f77 req-70f01009-ea42-4408-ba67-a8e9fd881612 service nova] [instance: 28caa5f5-141a-4ef9-abb3-33a1973d99cf] Received event network-vif-plugged-973be2f9-7f10-4bd3-996a-1ca7b92520c4 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 630.005922] env[63345]: DEBUG oslo_concurrency.lockutils [req-940cfc32-7ed2-4ef2-a364-066a1d9e1f77 req-70f01009-ea42-4408-ba67-a8e9fd881612 service nova] Acquiring lock "28caa5f5-141a-4ef9-abb3-33a1973d99cf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 630.006255] env[63345]: DEBUG oslo_concurrency.lockutils [req-940cfc32-7ed2-4ef2-a364-066a1d9e1f77 req-70f01009-ea42-4408-ba67-a8e9fd881612 service nova] Lock "28caa5f5-141a-4ef9-abb3-33a1973d99cf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 630.006380] env[63345]: DEBUG oslo_concurrency.lockutils [req-940cfc32-7ed2-4ef2-a364-066a1d9e1f77 
req-70f01009-ea42-4408-ba67-a8e9fd881612 service nova] Lock "28caa5f5-141a-4ef9-abb3-33a1973d99cf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 630.006472] env[63345]: DEBUG nova.compute.manager [req-940cfc32-7ed2-4ef2-a364-066a1d9e1f77 req-70f01009-ea42-4408-ba67-a8e9fd881612 service nova] [instance: 28caa5f5-141a-4ef9-abb3-33a1973d99cf] No waiting events found dispatching network-vif-plugged-973be2f9-7f10-4bd3-996a-1ca7b92520c4 {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 630.006629] env[63345]: WARNING nova.compute.manager [req-940cfc32-7ed2-4ef2-a364-066a1d9e1f77 req-70f01009-ea42-4408-ba67-a8e9fd881612 service nova] [instance: 28caa5f5-141a-4ef9-abb3-33a1973d99cf] Received unexpected event network-vif-plugged-973be2f9-7f10-4bd3-996a-1ca7b92520c4 for instance with vm_state building and task_state spawning. [ 630.119791] env[63345]: DEBUG nova.scheduler.client.report [None req-230142a3-84f1-43a5-9eb9-4bad4e3f22a1 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Updated inventory for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with generation 59 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 630.121657] env[63345]: DEBUG nova.compute.provider_tree [None req-230142a3-84f1-43a5-9eb9-4bad4e3f22a1 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Updating resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 generation from 59 to 60 during operation: update_inventory {{(pid=63345) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 630.121657] env[63345]: DEBUG nova.compute.provider_tree [None req-230142a3-84f1-43a5-9eb9-4bad4e3f22a1 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 630.335019] env[63345]: DEBUG oslo_vmware.api [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Task: {'id': task-1016693, 'name': PowerOnVM_Task, 'duration_secs': 0.489585} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 630.335019] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: ee31689b-bf0b-4737-86c7-5451c763e603] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 630.335019] env[63345]: INFO nova.compute.manager [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: ee31689b-bf0b-4737-86c7-5451c763e603] Took 7.00 seconds to spawn the instance on the hypervisor. [ 630.335019] env[63345]: DEBUG nova.compute.manager [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: ee31689b-bf0b-4737-86c7-5451c763e603] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 630.335019] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8faaf1cb-cb76-4410-917f-e9f313c68b57 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.402270] env[63345]: DEBUG oslo_concurrency.lockutils [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Acquiring lock "refresh_cache-28caa5f5-141a-4ef9-abb3-33a1973d99cf" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 630.402270] env[63345]: DEBUG oslo_concurrency.lockutils [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Acquired lock "refresh_cache-28caa5f5-141a-4ef9-abb3-33a1973d99cf" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 630.402270] env[63345]: DEBUG nova.network.neutron [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: 28caa5f5-141a-4ef9-abb3-33a1973d99cf] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 630.629985] env[63345]: DEBUG oslo_concurrency.lockutils [None req-230142a3-84f1-43a5-9eb9-4bad4e3f22a1 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 3.530s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 630.630691] env[63345]: ERROR nova.compute.manager [None req-230142a3-84f1-43a5-9eb9-4bad4e3f22a1 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 22d34ad5-6afb-4378-b16e-db51ac9d8c93, please check neutron logs for more information. 
[ 630.630691] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] Traceback (most recent call last): [ 630.630691] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 630.630691] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] self.driver.spawn(context, instance, image_meta, [ 630.630691] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 630.630691] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 630.630691] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 630.630691] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] vm_ref = self.build_virtual_machine(instance, [ 630.630691] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 630.630691] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] vif_infos = vmwarevif.get_vif_info(self._session, [ 630.630691] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 630.631043] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] for vif in network_info: [ 630.631043] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 630.631043] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] return self._sync_wrapper(fn, *args, **kwargs) [ 630.631043] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 630.631043] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] self.wait() [ 630.631043] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 630.631043] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] self[:] = self._gt.wait() [ 630.631043] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 630.631043] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] return self._exit_event.wait() [ 630.631043] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 630.631043] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] result = hub.switch() [ 630.631043] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
630.631043] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] return self.greenlet.switch() [ 630.631373] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 630.631373] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] result = function(*args, **kwargs) [ 630.631373] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 630.631373] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] return func(*args, **kwargs) [ 630.631373] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 630.631373] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] raise e [ 630.631373] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 630.631373] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] nwinfo = self.network_api.allocate_for_instance( [ 630.631373] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 630.631373] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] created_port_ids = self._update_ports_for_instance( [ 630.631373] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 630.631373] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] with excutils.save_and_reraise_exception(): [ 630.631373] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 630.631701] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] self.force_reraise() [ 630.631701] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 630.631701] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] raise self.value [ 630.631701] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 630.631701] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] updated_port = self._update_port( [ 630.631701] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 630.631701] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] _ensure_no_port_binding_failure(port) [ 630.631701] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 630.631701] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] raise exception.PortBindingFailed(port_id=port['id']) [ 630.631701] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] nova.exception.PortBindingFailed: Binding failed for port 22d34ad5-6afb-4378-b16e-db51ac9d8c93, please check neutron logs for more information. [ 630.631701] env[63345]: ERROR nova.compute.manager [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] [ 630.632465] env[63345]: DEBUG nova.compute.utils [None req-230142a3-84f1-43a5-9eb9-4bad4e3f22a1 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] Binding failed for port 22d34ad5-6afb-4378-b16e-db51ac9d8c93, please check neutron logs for more information. {{(pid=63345) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 630.634709] env[63345]: DEBUG oslo_concurrency.lockutils [None req-85aab4a0-1d2c-4cb7-b057-977c0a911cc2 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 20.616s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 630.638879] env[63345]: DEBUG nova.compute.manager [None req-230142a3-84f1-43a5-9eb9-4bad4e3f22a1 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] Build of instance e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d was re-scheduled: Binding failed for port 22d34ad5-6afb-4378-b16e-db51ac9d8c93, please check neutron logs for more information. 
{{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 630.639403] env[63345]: DEBUG nova.compute.manager [None req-230142a3-84f1-43a5-9eb9-4bad4e3f22a1 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] Unplugging VIFs for instance {{(pid=63345) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 630.639633] env[63345]: DEBUG oslo_concurrency.lockutils [None req-230142a3-84f1-43a5-9eb9-4bad4e3f22a1 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Acquiring lock "refresh_cache-e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 630.639776] env[63345]: DEBUG oslo_concurrency.lockutils [None req-230142a3-84f1-43a5-9eb9-4bad4e3f22a1 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Acquired lock "refresh_cache-e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 630.639931] env[63345]: DEBUG nova.network.neutron [None req-230142a3-84f1-43a5-9eb9-4bad4e3f22a1 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 630.853851] env[63345]: INFO nova.compute.manager [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: ee31689b-bf0b-4737-86c7-5451c763e603] Took 34.02 seconds to build instance. [ 630.952504] env[63345]: DEBUG nova.network.neutron [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: 28caa5f5-141a-4ef9-abb3-33a1973d99cf] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 631.058807] env[63345]: DEBUG oslo_concurrency.lockutils [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Acquiring lock "4a59b565-571f-48ef-97bd-bed9853e2d8e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 631.059068] env[63345]: DEBUG oslo_concurrency.lockutils [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Lock "4a59b565-571f-48ef-97bd-bed9853e2d8e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 631.160844] env[63345]: DEBUG nova.network.neutron [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: 28caa5f5-141a-4ef9-abb3-33a1973d99cf] Updating instance_info_cache with network_info: [{"id": "973be2f9-7f10-4bd3-996a-1ca7b92520c4", "address": "fa:16:3e:d4:2f:22", "network": {"id": "e1d0fcb4-77e3-4c6a-96ea-178593ec00ef", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1852131446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "572be07120b44a488924d794c7db100c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae70d41-6ebf-472a-8504-6530eb37ea41", "external-id": "nsx-vlan-transportzone-576", "segmentation_id": 576, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap973be2f9-7f", "ovs_interfaceid": "973be2f9-7f10-4bd3-996a-1ca7b92520c4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 631.179245] env[63345]: DEBUG nova.network.neutron [None req-230142a3-84f1-43a5-9eb9-4bad4e3f22a1 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 631.281245] env[63345]: DEBUG nova.network.neutron [None req-230142a3-84f1-43a5-9eb9-4bad4e3f22a1 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 631.357426] env[63345]: DEBUG oslo_concurrency.lockutils [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Lock "ee31689b-bf0b-4737-86c7-5451c763e603" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 77.230s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 631.627985] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bd30ae3-af1f-4487-aa61-047395d5769f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.640794] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08ec1984-07dd-459f-8e5f-2a5cee909d12 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.681432] env[63345]: DEBUG oslo_concurrency.lockutils [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Releasing lock "refresh_cache-28caa5f5-141a-4ef9-abb3-33a1973d99cf" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 631.681432] env[63345]: DEBUG nova.compute.manager [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: 28caa5f5-141a-4ef9-abb3-33a1973d99cf] Instance network_info: |[{"id": "973be2f9-7f10-4bd3-996a-1ca7b92520c4", "address": "fa:16:3e:d4:2f:22", "network": {"id": "e1d0fcb4-77e3-4c6a-96ea-178593ec00ef", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1852131446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "572be07120b44a488924d794c7db100c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae70d41-6ebf-472a-8504-6530eb37ea41", "external-id": "nsx-vlan-transportzone-576", "segmentation_id": 576, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap973be2f9-7f", "ovs_interfaceid": "973be2f9-7f10-4bd3-996a-1ca7b92520c4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 631.683224] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf 
tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: 28caa5f5-141a-4ef9-abb3-33a1973d99cf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d4:2f:22', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cae70d41-6ebf-472a-8504-6530eb37ea41', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '973be2f9-7f10-4bd3-996a-1ca7b92520c4', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 631.691583] env[63345]: DEBUG oslo.service.loopingcall [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 631.692456] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbf5ec46-43e8-4b8d-9f03-a62b511de80d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.695899] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 28caa5f5-141a-4ef9-abb3-33a1973d99cf] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 631.696673] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-25b39d4a-1e91-4997-b06b-42fef78dc8f8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.720329] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24c08131-4a4b-43ff-8969-cb54fe0d59bc {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.725076] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 631.725076] env[63345]: value = "task-1016694" [ 631.725076] env[63345]: _type = "Task" [ 631.725076] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 631.737120] env[63345]: DEBUG nova.compute.provider_tree [None req-85aab4a0-1d2c-4cb7-b057-977c0a911cc2 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 631.747310] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016694, 'name': CreateVM_Task} progress is 6%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.790283] env[63345]: DEBUG oslo_concurrency.lockutils [None req-230142a3-84f1-43a5-9eb9-4bad4e3f22a1 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Releasing lock "refresh_cache-e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 631.790283] env[63345]: DEBUG nova.compute.manager [None req-230142a3-84f1-43a5-9eb9-4bad4e3f22a1 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=63345) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 631.790283] env[63345]: DEBUG nova.compute.manager [None req-230142a3-84f1-43a5-9eb9-4bad4e3f22a1 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 631.790693] env[63345]: DEBUG nova.network.neutron [None req-230142a3-84f1-43a5-9eb9-4bad4e3f22a1 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 631.813822] env[63345]: DEBUG nova.network.neutron [None req-230142a3-84f1-43a5-9eb9-4bad4e3f22a1 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 631.859938] env[63345]: DEBUG nova.compute.manager [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] [instance: 04fd7aaa-658d-480d-8465-825f120477bc] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 632.070824] env[63345]: DEBUG nova.compute.manager [req-ca65774a-a610-4006-9beb-a17536fd8856 req-b841e991-bc02-48b5-a646-b422a8d7f388 service nova] [instance: 28caa5f5-141a-4ef9-abb3-33a1973d99cf] Received event network-changed-973be2f9-7f10-4bd3-996a-1ca7b92520c4 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 632.071032] env[63345]: DEBUG nova.compute.manager [req-ca65774a-a610-4006-9beb-a17536fd8856 req-b841e991-bc02-48b5-a646-b422a8d7f388 service nova] [instance: 28caa5f5-141a-4ef9-abb3-33a1973d99cf] Refreshing instance network info cache due to event network-changed-973be2f9-7f10-4bd3-996a-1ca7b92520c4. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 632.071252] env[63345]: DEBUG oslo_concurrency.lockutils [req-ca65774a-a610-4006-9beb-a17536fd8856 req-b841e991-bc02-48b5-a646-b422a8d7f388 service nova] Acquiring lock "refresh_cache-28caa5f5-141a-4ef9-abb3-33a1973d99cf" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 632.071393] env[63345]: DEBUG oslo_concurrency.lockutils [req-ca65774a-a610-4006-9beb-a17536fd8856 req-b841e991-bc02-48b5-a646-b422a8d7f388 service nova] Acquired lock "refresh_cache-28caa5f5-141a-4ef9-abb3-33a1973d99cf" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 632.071555] env[63345]: DEBUG nova.network.neutron [req-ca65774a-a610-4006-9beb-a17536fd8856 req-b841e991-bc02-48b5-a646-b422a8d7f388 service nova] [instance: 28caa5f5-141a-4ef9-abb3-33a1973d99cf] Refreshing network info cache for port 973be2f9-7f10-4bd3-996a-1ca7b92520c4 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 632.235794] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016694, 'name': CreateVM_Task, 'duration_secs': 0.412669} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 632.236189] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 28caa5f5-141a-4ef9-abb3-33a1973d99cf] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 632.236967] env[63345]: DEBUG oslo_concurrency.lockutils [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 632.237289] env[63345]: DEBUG oslo_concurrency.lockutils [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 632.237744] env[63345]: DEBUG oslo_concurrency.lockutils [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 632.238103] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dbd89de7-35e7-430a-8c54-7dce47b83e0a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.243017] env[63345]: DEBUG nova.scheduler.client.report [None req-85aab4a0-1d2c-4cb7-b057-977c0a911cc2 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 632.254020] env[63345]: DEBUG oslo_vmware.api [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Waiting for the task: (returnval){ [ 632.254020] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52269805-07b0-61e1-05f5-7d07a03508e2" [ 632.254020] env[63345]: _type = "Task" [ 632.254020] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 632.264978] env[63345]: DEBUG oslo_vmware.api [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52269805-07b0-61e1-05f5-7d07a03508e2, 'name': SearchDatastore_Task, 'duration_secs': 0.010909} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 632.265403] env[63345]: DEBUG oslo_concurrency.lockutils [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 632.265748] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: 28caa5f5-141a-4ef9-abb3-33a1973d99cf] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 632.266193] env[63345]: DEBUG oslo_concurrency.lockutils [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 632.266495] env[63345]: DEBUG oslo_concurrency.lockutils [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 632.266852] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 632.267399] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d5cf3549-4494-4871-9f04-db2df6ce363a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.276558] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 632.276740] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 632.277468] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2eb70e65-994e-4dfe-ac7f-3857cbb09760 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.283401] env[63345]: DEBUG oslo_vmware.api [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Waiting for the task: (returnval){ [ 632.283401] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]5248321e-f908-eb28-4f8a-e5c5de61407d" [ 632.283401] env[63345]: _type = "Task" [ 632.283401] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 632.291390] env[63345]: DEBUG oslo_vmware.api [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5248321e-f908-eb28-4f8a-e5c5de61407d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.316252] env[63345]: DEBUG nova.network.neutron [None req-230142a3-84f1-43a5-9eb9-4bad4e3f22a1 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 632.388500] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 632.752970] env[63345]: DEBUG oslo_concurrency.lockutils [None req-85aab4a0-1d2c-4cb7-b057-977c0a911cc2 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.119s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 632.756604] env[63345]: ERROR nova.compute.manager [None req-85aab4a0-1d2c-4cb7-b057-977c0a911cc2 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 5c1682d3-7aad-4da1-a273-71c9a99cd821, please check neutron logs for more information. 
[ 632.756604] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] Traceback (most recent call last): [ 632.756604] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 632.756604] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] self.driver.spawn(context, instance, image_meta, [ 632.756604] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 632.756604] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] self._vmops.spawn(context, instance, image_meta, injected_files, [ 632.756604] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 632.756604] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] vm_ref = self.build_virtual_machine(instance, [ 632.756604] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 632.756604] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] vif_infos = vmwarevif.get_vif_info(self._session, [ 632.756604] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 632.756994] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] for vif in network_info: [ 632.756994] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 632.756994] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] return self._sync_wrapper(fn, *args, **kwargs) [ 632.756994] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 632.756994] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] self.wait() [ 632.756994] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 632.756994] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] self[:] = self._gt.wait() [ 632.756994] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 632.756994] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] return self._exit_event.wait() [ 632.756994] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 632.756994] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] result = hub.switch() [ 632.756994] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
632.756994] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] return self.greenlet.switch() [ 632.757302] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 632.757302] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] result = function(*args, **kwargs) [ 632.757302] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 632.757302] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] return func(*args, **kwargs) [ 632.757302] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 632.757302] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] raise e [ 632.757302] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 632.757302] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] nwinfo = self.network_api.allocate_for_instance( [ 632.757302] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 632.757302] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] created_port_ids = self._update_ports_for_instance( [ 632.757302] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 632.757302] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] with excutils.save_and_reraise_exception(): [ 632.757302] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 632.757642] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] self.force_reraise() [ 632.757642] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 632.757642] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] raise self.value [ 632.757642] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 632.757642] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] updated_port = self._update_port( [ 632.757642] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 632.757642] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] _ensure_no_port_binding_failure(port) [ 632.757642] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 632.757642] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] raise exception.PortBindingFailed(port_id=port['id']) [ 632.757642] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] nova.exception.PortBindingFailed: Binding failed for port 5c1682d3-7aad-4da1-a273-71c9a99cd821, please check neutron logs for more information. [ 632.757642] env[63345]: ERROR nova.compute.manager [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] [ 632.757941] env[63345]: DEBUG nova.compute.utils [None req-85aab4a0-1d2c-4cb7-b057-977c0a911cc2 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] Binding failed for port 5c1682d3-7aad-4da1-a273-71c9a99cd821, please check neutron logs for more information. {{(pid=63345) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 632.757941] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f951fde0-e7d4-4228-ab48-05cbc0ea0cd4 tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 19.907s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 632.764868] env[63345]: DEBUG nova.compute.manager [None req-85aab4a0-1d2c-4cb7-b057-977c0a911cc2 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] Build of instance 079cd9f1-4753-4298-9b06-c3b9925d2982 was re-scheduled: Binding failed for port 5c1682d3-7aad-4da1-a273-71c9a99cd821, please check neutron logs for more information. 
{{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 632.765550] env[63345]: DEBUG nova.compute.manager [None req-85aab4a0-1d2c-4cb7-b057-977c0a911cc2 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] Unplugging VIFs for instance {{(pid=63345) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 632.765813] env[63345]: DEBUG oslo_concurrency.lockutils [None req-85aab4a0-1d2c-4cb7-b057-977c0a911cc2 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquiring lock "refresh_cache-079cd9f1-4753-4298-9b06-c3b9925d2982" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 632.765973] env[63345]: DEBUG oslo_concurrency.lockutils [None req-85aab4a0-1d2c-4cb7-b057-977c0a911cc2 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquired lock "refresh_cache-079cd9f1-4753-4298-9b06-c3b9925d2982" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 632.766182] env[63345]: DEBUG nova.network.neutron [None req-85aab4a0-1d2c-4cb7-b057-977c0a911cc2 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 632.797851] env[63345]: DEBUG oslo_vmware.api [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5248321e-f908-eb28-4f8a-e5c5de61407d, 'name': SearchDatastore_Task, 'duration_secs': 0.008271} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 632.799168] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bc07c99c-26ea-452f-a846-8b3fd341168e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.808118] env[63345]: DEBUG oslo_vmware.api [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Waiting for the task: (returnval){ [ 632.808118] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]529cc387-5e5f-97d4-52b9-ee8483c83b22" [ 632.808118] env[63345]: _type = "Task" [ 632.808118] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 632.820164] env[63345]: INFO nova.compute.manager [None req-230142a3-84f1-43a5-9eb9-4bad4e3f22a1 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d] Took 1.03 seconds to deallocate network for instance. 
[ 632.822838] env[63345]: DEBUG oslo_vmware.api [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]529cc387-5e5f-97d4-52b9-ee8483c83b22, 'name': SearchDatastore_Task, 'duration_secs': 0.009676} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 632.825797] env[63345]: DEBUG oslo_concurrency.lockutils [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 632.826222] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 28caa5f5-141a-4ef9-abb3-33a1973d99cf/28caa5f5-141a-4ef9-abb3-33a1973d99cf.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 632.827098] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-99691978-6dd3-4c2b-949f-997963e7712e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.836252] env[63345]: DEBUG oslo_vmware.api [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Waiting for the task: (returnval){ [ 632.836252] env[63345]: value = "task-1016695" [ 632.836252] env[63345]: _type = "Task" [ 632.836252] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 632.846656] env[63345]: DEBUG oslo_vmware.api [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Task: {'id': task-1016695, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.187366] env[63345]: DEBUG nova.network.neutron [req-ca65774a-a610-4006-9beb-a17536fd8856 req-b841e991-bc02-48b5-a646-b422a8d7f388 service nova] [instance: 28caa5f5-141a-4ef9-abb3-33a1973d99cf] Updated VIF entry in instance network info cache for port 973be2f9-7f10-4bd3-996a-1ca7b92520c4. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 633.187866] env[63345]: DEBUG nova.network.neutron [req-ca65774a-a610-4006-9beb-a17536fd8856 req-b841e991-bc02-48b5-a646-b422a8d7f388 service nova] [instance: 28caa5f5-141a-4ef9-abb3-33a1973d99cf] Updating instance_info_cache with network_info: [{"id": "973be2f9-7f10-4bd3-996a-1ca7b92520c4", "address": "fa:16:3e:d4:2f:22", "network": {"id": "e1d0fcb4-77e3-4c6a-96ea-178593ec00ef", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1852131446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "572be07120b44a488924d794c7db100c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae70d41-6ebf-472a-8504-6530eb37ea41", "external-id": "nsx-vlan-transportzone-576", "segmentation_id": 576, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap973be2f9-7f", "ovs_interfaceid": "973be2f9-7f10-4bd3-996a-1ca7b92520c4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 633.309774] env[63345]: DEBUG nova.network.neutron [None req-85aab4a0-1d2c-4cb7-b057-977c0a911cc2 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 633.359216] env[63345]: DEBUG oslo_vmware.api [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Task: {'id': task-1016695, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.509493} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.365822] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 28caa5f5-141a-4ef9-abb3-33a1973d99cf/28caa5f5-141a-4ef9-abb3-33a1973d99cf.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 633.366232] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: 28caa5f5-141a-4ef9-abb3-33a1973d99cf] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 633.367179] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ac2fb19f-e00b-4271-850e-7fcaa9684ae4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.379905] env[63345]: DEBUG oslo_vmware.api [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Waiting for the task: (returnval){ [ 633.379905] env[63345]: value = "task-1016696" [ 633.379905] env[63345]: _type = "Task" [ 633.379905] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 633.398892] env[63345]: DEBUG oslo_vmware.api [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Task: {'id': task-1016696, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.434180] env[63345]: DEBUG nova.network.neutron [None req-85aab4a0-1d2c-4cb7-b057-977c0a911cc2 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 633.691898] env[63345]: DEBUG oslo_concurrency.lockutils [req-ca65774a-a610-4006-9beb-a17536fd8856 req-b841e991-bc02-48b5-a646-b422a8d7f388 service nova] Releasing lock "refresh_cache-28caa5f5-141a-4ef9-abb3-33a1973d99cf" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 633.792442] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a365ad9-b92c-4925-b7a1-a0e72b2b47a6 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.800721] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b7a37c8-f555-4db9-a9a7-2d581db4a6a3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.833571] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a59fa71-9cb9-431f-9a70-24f577033158 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.844753] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d8e0957-65d2-46b0-8b7c-1d37d1338d24 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.859069] env[63345]: DEBUG nova.compute.provider_tree [None req-f951fde0-e7d4-4228-ab48-05cbc0ea0cd4 tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 633.862763] env[63345]: INFO nova.scheduler.client.report [None req-230142a3-84f1-43a5-9eb9-4bad4e3f22a1 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Deleted allocations for instance e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d [ 633.895497] env[63345]: DEBUG oslo_vmware.api [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Task: {'id': task-1016696, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075283} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.895497] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: 28caa5f5-141a-4ef9-abb3-33a1973d99cf] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 633.898469] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f228a089-75d7-408e-9afa-ea49bd0dfbdd {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.923810] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: 28caa5f5-141a-4ef9-abb3-33a1973d99cf] Reconfiguring VM instance instance-0000001a to attach disk [datastore2] 28caa5f5-141a-4ef9-abb3-33a1973d99cf/28caa5f5-141a-4ef9-abb3-33a1973d99cf.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 633.924120] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4344f162-ff52-4eb6-bf97-f029ccd9bccb {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.942811] env[63345]: DEBUG oslo_concurrency.lockutils [None req-85aab4a0-1d2c-4cb7-b057-977c0a911cc2 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Releasing lock "refresh_cache-079cd9f1-4753-4298-9b06-c3b9925d2982" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 633.943041] env[63345]: DEBUG nova.compute.manager [None req-85aab4a0-1d2c-4cb7-b057-977c0a911cc2 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=63345) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 633.943224] env[63345]: DEBUG nova.compute.manager [None req-85aab4a0-1d2c-4cb7-b057-977c0a911cc2 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 633.943388] env[63345]: DEBUG nova.network.neutron [None req-85aab4a0-1d2c-4cb7-b057-977c0a911cc2 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 633.946733] env[63345]: DEBUG oslo_vmware.api [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Waiting for the task: (returnval){ [ 633.946733] env[63345]: value = "task-1016697" [ 633.946733] env[63345]: _type = "Task" [ 633.946733] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 633.955429] env[63345]: DEBUG oslo_vmware.api [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Task: {'id': task-1016697, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.961806] env[63345]: DEBUG nova.network.neutron [None req-85aab4a0-1d2c-4cb7-b057-977c0a911cc2 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 634.370111] env[63345]: DEBUG nova.scheduler.client.report [None req-f951fde0-e7d4-4228-ab48-05cbc0ea0cd4 tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 634.375874] env[63345]: DEBUG oslo_concurrency.lockutils [None req-230142a3-84f1-43a5-9eb9-4bad4e3f22a1 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Lock "e34dbaf4-05ee-40c0-8f7c-7e629ad6dd1d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 93.535s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 634.458155] env[63345]: DEBUG oslo_vmware.api [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Task: {'id': task-1016697, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.464715] env[63345]: DEBUG nova.network.neutron [None req-85aab4a0-1d2c-4cb7-b057-977c0a911cc2 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 634.878013] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f951fde0-e7d4-4228-ab48-05cbc0ea0cd4 tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.122s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 634.878691] env[63345]: ERROR nova.compute.manager [None req-f951fde0-e7d4-4228-ab48-05cbc0ea0cd4 tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 908a9a32-7233-42bf-a87f-5fe81165e6ac, please check neutron logs for more information. [ 634.878691] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] Traceback (most recent call last): [ 634.878691] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 634.878691] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] self.driver.spawn(context, instance, image_meta, [ 634.878691] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 634.878691] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 634.878691] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 634.878691] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] vm_ref = self.build_virtual_machine(instance, [ 634.878691] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 634.878691] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] vif_infos = vmwarevif.get_vif_info(self._session, [ 634.878691] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 634.879046] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] for vif in network_info: [ 634.879046] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 634.879046] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] return self._sync_wrapper(fn, *args, **kwargs) [ 634.879046] env[63345]: ERROR nova.compute.manager [instance: 
2889c4d1-ac1b-404d-a4f7-2b908557348d] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 634.879046] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] self.wait() [ 634.879046] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 634.879046] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] self[:] = self._gt.wait() [ 634.879046] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 634.879046] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] return self._exit_event.wait() [ 634.879046] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 634.879046] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] result = hub.switch() [ 634.879046] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 634.879046] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] return self.greenlet.switch() [ 634.879426] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 634.879426] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] result = function(*args, **kwargs) [ 634.879426] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 634.879426] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] return func(*args, **kwargs) [ 634.879426] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 634.879426] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] raise e [ 634.879426] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 634.879426] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] nwinfo = self.network_api.allocate_for_instance( [ 634.879426] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 634.879426] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] created_port_ids = self._update_ports_for_instance( [ 634.879426] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 634.879426] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] with excutils.save_and_reraise_exception(): [ 634.879426] env[63345]: ERROR nova.compute.manager [instance: 
2889c4d1-ac1b-404d-a4f7-2b908557348d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 634.879873] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] self.force_reraise() [ 634.879873] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 634.879873] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] raise self.value [ 634.879873] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 634.879873] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] updated_port = self._update_port( [ 634.879873] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 634.879873] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] _ensure_no_port_binding_failure(port) [ 634.879873] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 634.879873] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] raise exception.PortBindingFailed(port_id=port['id']) [ 634.879873] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] nova.exception.PortBindingFailed: Binding failed for port 908a9a32-7233-42bf-a87f-5fe81165e6ac, please check neutron logs for more information. [ 634.879873] env[63345]: ERROR nova.compute.manager [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] [ 634.880190] env[63345]: DEBUG nova.compute.utils [None req-f951fde0-e7d4-4228-ab48-05cbc0ea0cd4 tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] Binding failed for port 908a9a32-7233-42bf-a87f-5fe81165e6ac, please check neutron logs for more information. {{(pid=63345) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 634.880722] env[63345]: DEBUG oslo_concurrency.lockutils [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.432s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 634.882168] env[63345]: INFO nova.compute.claims [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: b4a7d6dd-98dc-49d8-b344-1878cd5a3f51] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 634.884628] env[63345]: DEBUG nova.compute.manager [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] [instance: 46d3332a-bfb9-4812-8201-a87467ce5151] Starting instance... 
{{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 634.887149] env[63345]: DEBUG nova.compute.manager [None req-f951fde0-e7d4-4228-ab48-05cbc0ea0cd4 tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] Build of instance 2889c4d1-ac1b-404d-a4f7-2b908557348d was re-scheduled: Binding failed for port 908a9a32-7233-42bf-a87f-5fe81165e6ac, please check neutron logs for more information. {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 634.887626] env[63345]: DEBUG nova.compute.manager [None req-f951fde0-e7d4-4228-ab48-05cbc0ea0cd4 tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] Unplugging VIFs for instance {{(pid=63345) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 634.887852] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f951fde0-e7d4-4228-ab48-05cbc0ea0cd4 tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Acquiring lock "refresh_cache-2889c4d1-ac1b-404d-a4f7-2b908557348d" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 634.888033] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f951fde0-e7d4-4228-ab48-05cbc0ea0cd4 tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Acquired lock "refresh_cache-2889c4d1-ac1b-404d-a4f7-2b908557348d" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 634.888178] env[63345]: DEBUG nova.network.neutron [None req-f951fde0-e7d4-4228-ab48-05cbc0ea0cd4 tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 634.958578] env[63345]: DEBUG oslo_vmware.api [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Task: {'id': task-1016697, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.966576] env[63345]: INFO nova.compute.manager [None req-85aab4a0-1d2c-4cb7-b057-977c0a911cc2 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 079cd9f1-4753-4298-9b06-c3b9925d2982] Took 1.02 seconds to deallocate network for instance. 
[ 635.060799] env[63345]: DEBUG oslo_concurrency.lockutils [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Acquiring lock "d3e99100-f13f-4019-9b5a-adaa65dacc5f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 635.061054] env[63345]: DEBUG oslo_concurrency.lockutils [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Lock "d3e99100-f13f-4019-9b5a-adaa65dacc5f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 635.427982] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 635.440139] env[63345]: DEBUG nova.network.neutron [None req-f951fde0-e7d4-4228-ab48-05cbc0ea0cd4 tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 635.459855] env[63345]: DEBUG oslo_vmware.api [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Task: {'id': task-1016697, 'name': ReconfigVM_Task, 'duration_secs': 1.098912} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 635.460036] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: 28caa5f5-141a-4ef9-abb3-33a1973d99cf] Reconfigured VM instance instance-0000001a to attach disk [datastore2] 28caa5f5-141a-4ef9-abb3-33a1973d99cf/28caa5f5-141a-4ef9-abb3-33a1973d99cf.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 635.460640] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-618bd718-1869-441b-8c2d-aeb99ffa4cc4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.468083] env[63345]: DEBUG oslo_vmware.api [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Waiting for the task: (returnval){ [ 635.468083] env[63345]: value = "task-1016698" [ 635.468083] env[63345]: _type = "Task" [ 635.468083] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 635.494099] env[63345]: DEBUG oslo_vmware.api [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Task: {'id': task-1016698, 'name': Rename_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.580067] env[63345]: DEBUG nova.network.neutron [None req-f951fde0-e7d4-4228-ab48-05cbc0ea0cd4 tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 635.908228] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08f191ab-5184-4089-886d-54352e0df35b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.916432] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d22a1b97-8065-4ff6-a308-e054812416d0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.946838] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5cd9223-2073-415a-8bcc-49325bf7d4ee {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.954619] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af374116-b2b9-4375-8a29-9e13c5f8831e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.968963] env[63345]: DEBUG nova.compute.provider_tree [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 635.980670] env[63345]: DEBUG oslo_vmware.api [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Task: {'id': task-1016698, 'name': Rename_Task, 'duration_secs': 0.198552} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 635.981635] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: 28caa5f5-141a-4ef9-abb3-33a1973d99cf] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 635.981876] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2f7a02cb-8862-4b15-a673-61bd3af9fc52 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.989335] env[63345]: DEBUG oslo_vmware.api [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Waiting for the task: (returnval){ [ 635.989335] env[63345]: value = "task-1016699" [ 635.989335] env[63345]: _type = "Task" [ 635.989335] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 635.995304] env[63345]: INFO nova.scheduler.client.report [None req-85aab4a0-1d2c-4cb7-b057-977c0a911cc2 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Deleted allocations for instance 079cd9f1-4753-4298-9b06-c3b9925d2982 [ 636.004020] env[63345]: DEBUG oslo_vmware.api [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Task: {'id': task-1016699, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.082425] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f951fde0-e7d4-4228-ab48-05cbc0ea0cd4 tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Releasing lock "refresh_cache-2889c4d1-ac1b-404d-a4f7-2b908557348d" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 636.082832] env[63345]: DEBUG nova.compute.manager [None req-f951fde0-e7d4-4228-ab48-05cbc0ea0cd4 tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=63345) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 636.083140] env[63345]: DEBUG nova.compute.manager [None req-f951fde0-e7d4-4228-ab48-05cbc0ea0cd4 tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 636.083434] env[63345]: DEBUG nova.network.neutron [None req-f951fde0-e7d4-4228-ab48-05cbc0ea0cd4 tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 636.107675] env[63345]: DEBUG nova.network.neutron [None req-f951fde0-e7d4-4228-ab48-05cbc0ea0cd4 tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 636.176922] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Acquiring lock "6cbe136b-5bf6-4f17-bcef-b712d850615f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 636.177318] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Lock "6cbe136b-5bf6-4f17-bcef-b712d850615f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 636.474716] env[63345]: DEBUG nova.scheduler.client.report [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 636.500587] env[63345]: DEBUG oslo_vmware.api [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Task: {'id': task-1016699, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.506203] env[63345]: DEBUG oslo_concurrency.lockutils [None req-85aab4a0-1d2c-4cb7-b057-977c0a911cc2 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Lock "079cd9f1-4753-4298-9b06-c3b9925d2982" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 92.758s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 636.611696] env[63345]: DEBUG nova.network.neutron [None req-f951fde0-e7d4-4228-ab48-05cbc0ea0cd4 tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 636.980237] env[63345]: DEBUG oslo_concurrency.lockutils [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.099s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 636.980835] env[63345]: DEBUG nova.compute.manager [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: b4a7d6dd-98dc-49d8-b344-1878cd5a3f51] Start building networks asynchronously for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 636.983421] env[63345]: DEBUG oslo_concurrency.lockutils [None req-28a9f8ee-561e-42c1-a81b-2f1cf60def7e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 20.624s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 637.000529] env[63345]: DEBUG oslo_vmware.api [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Task: {'id': task-1016699, 'name': PowerOnVM_Task, 'duration_secs': 0.663667} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 637.000840] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: 28caa5f5-141a-4ef9-abb3-33a1973d99cf] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 637.001062] env[63345]: INFO nova.compute.manager [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: 28caa5f5-141a-4ef9-abb3-33a1973d99cf] Took 7.88 seconds to spawn the instance on the hypervisor. 
[ 637.001244] env[63345]: DEBUG nova.compute.manager [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: 28caa5f5-141a-4ef9-abb3-33a1973d99cf] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 637.002122] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6ed6bdd-7b72-4043-ba41-2dccc5712e35 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.012013] env[63345]: DEBUG nova.compute.manager [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 637.113650] env[63345]: INFO nova.compute.manager [None req-f951fde0-e7d4-4228-ab48-05cbc0ea0cd4 tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] [instance: 2889c4d1-ac1b-404d-a4f7-2b908557348d] Took 1.03 seconds to deallocate network for instance. [ 637.489537] env[63345]: DEBUG nova.compute.utils [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 637.495395] env[63345]: DEBUG nova.compute.manager [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: b4a7d6dd-98dc-49d8-b344-1878cd5a3f51] Allocating IP information in the background. 
{{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 637.495395] env[63345]: DEBUG nova.network.neutron [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: b4a7d6dd-98dc-49d8-b344-1878cd5a3f51] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 637.515214] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquiring lock "778faa4f-4c5f-4ec2-b17b-5d7513c9c218" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 637.515437] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Lock "778faa4f-4c5f-4ec2-b17b-5d7513c9c218" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 637.530290] env[63345]: INFO nova.compute.manager [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: 28caa5f5-141a-4ef9-abb3-33a1973d99cf] Took 34.06 seconds to build instance. [ 637.546642] env[63345]: DEBUG oslo_concurrency.lockutils [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 637.567783] env[63345]: DEBUG nova.policy [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '40ee4048c6d843308be28da1a5906015', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '572be07120b44a488924d794c7db100c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 637.914400] env[63345]: DEBUG nova.network.neutron [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: b4a7d6dd-98dc-49d8-b344-1878cd5a3f51] Successfully created port: 1634e3f5-396d-4cf4-a5e2-e985d04c1391 {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 637.995373] env[63345]: DEBUG nova.compute.manager [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: b4a7d6dd-98dc-49d8-b344-1878cd5a3f51] Start building block device mappings for 
instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 638.027947] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-117412d8-acbd-4da1-8d9d-7e9a44a1d227 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.035020] env[63345]: DEBUG oslo_concurrency.lockutils [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Lock "28caa5f5-141a-4ef9-abb3-33a1973d99cf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 83.860s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 638.041904] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62f445b6-5921-44ae-ab35-ee667acddafb {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.078112] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ef1c8c2-cb81-4492-bf3e-ce92b055db23 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.088111] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23bd946d-4294-4e78-85b2-b993fd47bdec {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.105714] env[63345]: DEBUG nova.compute.provider_tree [None req-28a9f8ee-561e-42c1-a81b-2f1cf60def7e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 638.158329] env[63345]: INFO nova.scheduler.client.report [None req-f951fde0-e7d4-4228-ab48-05cbc0ea0cd4 tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Deleted allocations for instance 2889c4d1-ac1b-404d-a4f7-2b908557348d [ 638.536967] env[63345]: DEBUG nova.compute.manager [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Starting instance... 
{{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 638.609512] env[63345]: DEBUG nova.scheduler.client.report [None req-28a9f8ee-561e-42c1-a81b-2f1cf60def7e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 638.673746] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f951fde0-e7d4-4228-ab48-05cbc0ea0cd4 tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Lock "2889c4d1-ac1b-404d-a4f7-2b908557348d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 93.542s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 639.005950] env[63345]: DEBUG nova.compute.manager [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: b4a7d6dd-98dc-49d8-b344-1878cd5a3f51] Start spawning the instance on the hypervisor. {{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 639.032881] env[63345]: DEBUG nova.virt.hardware [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 639.033156] env[63345]: DEBUG nova.virt.hardware [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 639.033319] env[63345]: DEBUG nova.virt.hardware [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 639.033515] env[63345]: DEBUG nova.virt.hardware [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 
tempest-ListServersNegativeTestJSON-726124101-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 639.033666] env[63345]: DEBUG nova.virt.hardware [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 639.033816] env[63345]: DEBUG nova.virt.hardware [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 639.034027] env[63345]: DEBUG nova.virt.hardware [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 639.034207] env[63345]: DEBUG nova.virt.hardware [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 639.034377] env[63345]: DEBUG nova.virt.hardware [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 639.034538] env[63345]: DEBUG nova.virt.hardware [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 639.034711] env[63345]: DEBUG nova.virt.hardware [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 639.035607] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5703776-e10f-433e-9cbc-bde6205b39dd {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.047478] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0383cc60-8259-4962-b18c-a6a7966d539b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.065089] env[63345]: DEBUG oslo_concurrency.lockutils [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 639.114598] env[63345]: DEBUG oslo_concurrency.lockutils [None req-28a9f8ee-561e-42c1-a81b-2f1cf60def7e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.131s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 639.115676] env[63345]: ERROR nova.compute.manager [None req-28a9f8ee-561e-42c1-a81b-2f1cf60def7e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 5e07a532-1360-4ce0-a13a-d9d0b96bbf5d, please check neutron logs for more information. [ 639.115676] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] Traceback (most recent call last): [ 639.115676] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 639.115676] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] self.driver.spawn(context, instance, image_meta, [ 639.115676] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 639.115676] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] self._vmops.spawn(context, instance, image_meta, injected_files, [ 639.115676] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 639.115676] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] vm_ref = self.build_virtual_machine(instance, [ 639.115676] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 639.115676] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] vif_infos = vmwarevif.get_vif_info(self._session, [ 639.115676] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 639.116049] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] for vif in network_info: [ 639.116049] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 639.116049] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] return self._sync_wrapper(fn, *args, **kwargs) [ 639.116049] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 639.116049] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] self.wait() [ 639.116049] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 639.116049] env[63345]: ERROR nova.compute.manager [instance: 
35a5bd72-403b-467b-ad52-1a1bf4958dbb] self[:] = self._gt.wait() [ 639.116049] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 639.116049] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] return self._exit_event.wait() [ 639.116049] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 639.116049] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] current.throw(*self._exc) [ 639.116049] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 639.116049] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] result = function(*args, **kwargs) [ 639.116443] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 639.116443] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] return func(*args, **kwargs) [ 639.116443] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 639.116443] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] raise e [ 639.116443] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 639.116443] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] nwinfo = self.network_api.allocate_for_instance( [ 639.116443] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 639.116443] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] created_port_ids = self._update_ports_for_instance( [ 639.116443] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 639.116443] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] with excutils.save_and_reraise_exception(): [ 639.116443] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 639.116443] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] self.force_reraise() [ 639.116443] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 639.116845] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] raise self.value [ 639.116845] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 639.116845] env[63345]: 
ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] updated_port = self._update_port( [ 639.116845] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 639.116845] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] _ensure_no_port_binding_failure(port) [ 639.116845] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 639.116845] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] raise exception.PortBindingFailed(port_id=port['id']) [ 639.116845] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] nova.exception.PortBindingFailed: Binding failed for port 5e07a532-1360-4ce0-a13a-d9d0b96bbf5d, please check neutron logs for more information. [ 639.116845] env[63345]: ERROR nova.compute.manager [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] [ 639.116845] env[63345]: DEBUG nova.compute.utils [None req-28a9f8ee-561e-42c1-a81b-2f1cf60def7e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] Binding failed for port 5e07a532-1360-4ce0-a13a-d9d0b96bbf5d, please check neutron logs for more information. {{(pid=63345) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 639.119369] env[63345]: DEBUG oslo_concurrency.lockutils [None req-40c46a23-51d1-4974-99d7-63d301a98173 tempest-ServerActionsTestJSON-1881851479 tempest-ServerActionsTestJSON-1881851479-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 17.567s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 639.123244] env[63345]: DEBUG nova.compute.manager [None req-28a9f8ee-561e-42c1-a81b-2f1cf60def7e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] Build of instance 35a5bd72-403b-467b-ad52-1a1bf4958dbb was re-scheduled: Binding failed for port 5e07a532-1360-4ce0-a13a-d9d0b96bbf5d, please check neutron logs for more information. 
{{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 639.123707] env[63345]: DEBUG nova.compute.manager [None req-28a9f8ee-561e-42c1-a81b-2f1cf60def7e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] Unplugging VIFs for instance {{(pid=63345) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 639.125297] env[63345]: DEBUG oslo_concurrency.lockutils [None req-28a9f8ee-561e-42c1-a81b-2f1cf60def7e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquiring lock "refresh_cache-35a5bd72-403b-467b-ad52-1a1bf4958dbb" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 639.125297] env[63345]: DEBUG oslo_concurrency.lockutils [None req-28a9f8ee-561e-42c1-a81b-2f1cf60def7e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquired lock "refresh_cache-35a5bd72-403b-467b-ad52-1a1bf4958dbb" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 639.125297] env[63345]: DEBUG nova.network.neutron [None req-28a9f8ee-561e-42c1-a81b-2f1cf60def7e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 639.176239] env[63345]: DEBUG nova.compute.manager [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] [instance: 4d41f4a7-4fde-4d34-be7c-533c00fe5ae6] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 639.658200] env[63345]: DEBUG nova.network.neutron [None req-28a9f8ee-561e-42c1-a81b-2f1cf60def7e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 639.703617] env[63345]: DEBUG oslo_concurrency.lockutils [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 639.747844] env[63345]: DEBUG nova.network.neutron [None req-28a9f8ee-561e-42c1-a81b-2f1cf60def7e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 640.096439] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ce3bb83-5cdd-4bef-a05b-006426d982b6 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.107748] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8bdafe8-aca9-47f2-9298-263dc95c7700 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.152317] env[63345]: DEBUG nova.network.neutron [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: b4a7d6dd-98dc-49d8-b344-1878cd5a3f51] Successfully updated port: 1634e3f5-396d-4cf4-a5e2-e985d04c1391 {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 640.158952] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d19cdf63-24cf-4606-abb7-6ea2f0786fdc {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.175476] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ea5971a-76c6-4c80-9b0a-57cf157fd804 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.180832] env[63345]: DEBUG nova.compute.manager [req-b7c7660b-ce37-453f-bf99-ff5720010ef7 req-b63a93e3-9b49-47f9-83fc-79605b9b7752 service nova] [instance: b4a7d6dd-98dc-49d8-b344-1878cd5a3f51] Received event network-vif-plugged-1634e3f5-396d-4cf4-a5e2-e985d04c1391 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 640.181090] env[63345]: DEBUG oslo_concurrency.lockutils [req-b7c7660b-ce37-453f-bf99-ff5720010ef7 req-b63a93e3-9b49-47f9-83fc-79605b9b7752 service nova] Acquiring lock "b4a7d6dd-98dc-49d8-b344-1878cd5a3f51-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 640.181348] env[63345]: DEBUG oslo_concurrency.lockutils [req-b7c7660b-ce37-453f-bf99-ff5720010ef7 req-b63a93e3-9b49-47f9-83fc-79605b9b7752 service nova] Lock "b4a7d6dd-98dc-49d8-b344-1878cd5a3f51-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 640.181522] 
env[63345]: DEBUG oslo_concurrency.lockutils [req-b7c7660b-ce37-453f-bf99-ff5720010ef7 req-b63a93e3-9b49-47f9-83fc-79605b9b7752 service nova] Lock "b4a7d6dd-98dc-49d8-b344-1878cd5a3f51-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 640.181718] env[63345]: DEBUG nova.compute.manager [req-b7c7660b-ce37-453f-bf99-ff5720010ef7 req-b63a93e3-9b49-47f9-83fc-79605b9b7752 service nova] [instance: b4a7d6dd-98dc-49d8-b344-1878cd5a3f51] No waiting events found dispatching network-vif-plugged-1634e3f5-396d-4cf4-a5e2-e985d04c1391 {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 640.181911] env[63345]: WARNING nova.compute.manager [req-b7c7660b-ce37-453f-bf99-ff5720010ef7 req-b63a93e3-9b49-47f9-83fc-79605b9b7752 service nova] [instance: b4a7d6dd-98dc-49d8-b344-1878cd5a3f51] Received unexpected event network-vif-plugged-1634e3f5-396d-4cf4-a5e2-e985d04c1391 for instance with vm_state building and task_state spawning. [ 640.195282] env[63345]: DEBUG nova.compute.provider_tree [None req-40c46a23-51d1-4974-99d7-63d301a98173 tempest-ServerActionsTestJSON-1881851479 tempest-ServerActionsTestJSON-1881851479-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 640.250936] env[63345]: DEBUG oslo_concurrency.lockutils [None req-28a9f8ee-561e-42c1-a81b-2f1cf60def7e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Releasing lock "refresh_cache-35a5bd72-403b-467b-ad52-1a1bf4958dbb" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 640.251206] env[63345]: DEBUG nova.compute.manager [None req-28a9f8ee-561e-42c1-a81b-2f1cf60def7e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=63345) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 640.251390] env[63345]: DEBUG nova.compute.manager [None req-28a9f8ee-561e-42c1-a81b-2f1cf60def7e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 640.251563] env[63345]: DEBUG nova.network.neutron [None req-28a9f8ee-561e-42c1-a81b-2f1cf60def7e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 640.271850] env[63345]: DEBUG nova.network.neutron [None req-28a9f8ee-561e-42c1-a81b-2f1cf60def7e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 640.668019] env[63345]: DEBUG oslo_concurrency.lockutils [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Acquiring lock "refresh_cache-b4a7d6dd-98dc-49d8-b344-1878cd5a3f51" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 640.668212] env[63345]: DEBUG oslo_concurrency.lockutils [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Acquired lock "refresh_cache-b4a7d6dd-98dc-49d8-b344-1878cd5a3f51" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 640.668362] env[63345]: DEBUG nova.network.neutron [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: b4a7d6dd-98dc-49d8-b344-1878cd5a3f51] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 640.698630] env[63345]: DEBUG nova.scheduler.client.report [None req-40c46a23-51d1-4974-99d7-63d301a98173 tempest-ServerActionsTestJSON-1881851479 tempest-ServerActionsTestJSON-1881851479-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 640.774690] env[63345]: DEBUG nova.network.neutron [None req-28a9f8ee-561e-42c1-a81b-2f1cf60def7e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 641.203741] env[63345]: DEBUG oslo_concurrency.lockutils [None req-40c46a23-51d1-4974-99d7-63d301a98173 tempest-ServerActionsTestJSON-1881851479 tempest-ServerActionsTestJSON-1881851479-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.085s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 641.204380] env[63345]: ERROR nova.compute.manager [None req-40c46a23-51d1-4974-99d7-63d301a98173 tempest-ServerActionsTestJSON-1881851479 tempest-ServerActionsTestJSON-1881851479-project-member] [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 5013d56d-b041-4790-9c2f-7f0c6d71ec30, please check neutron logs for more information. 
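Annotation: the two PortBindingFailed tracebacks in this section (port 5e07a532-1360-4ce0-a13a-d9d0b96bbf5d above and port 5013d56d-b041-4790-9c2f-7f0c6d71ec30 below) follow the same call chain: allocate_for_instance() -> _update_ports_for_instance() -> _update_port() -> _ensure_no_port_binding_failure(), which raises the exception; the compute manager then aborts the resource claim, logs "was re-scheduled", and deallocates the empty network info cache. The sketch below is a minimal, self-contained stand-in for that final check. The traceback only confirms that the helper raises PortBindingFailed(port_id=port['id']) from nova/network/neutron.py:294; the 'binding:vif_type' == 'binding_failed' test is an assumption about how Neutron reports the failed binding.

```python
# Minimal sketch of the check behind the PortBindingFailed errors in this log.
# Only the raised exception and its message are confirmed by the traceback;
# the field name and value checked here are assumptions.

class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")

def _ensure_no_port_binding_failure(port):
    # Neutron marks a port it could not bind; Nova turns that state into a
    # hard failure so the build is re-scheduled on another attempt.
    if port.get('binding:vif_type') == 'binding_failed':  # assumed field/value
        raise PortBindingFailed(port_id=port['id'])

if __name__ == "__main__":
    try:
        _ensure_no_port_binding_failure(
            {'id': '5013d56d-b041-4790-9c2f-7f0c6d71ec30',
             'binding:vif_type': 'binding_failed'})
    except PortBindingFailed as exc:
        print(exc)  # mirrors the ERROR message format seen in this log
```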
[ 641.204380] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] Traceback (most recent call last): [ 641.204380] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 641.204380] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] self.driver.spawn(context, instance, image_meta, [ 641.204380] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 641.204380] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] self._vmops.spawn(context, instance, image_meta, injected_files, [ 641.204380] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 641.204380] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] vm_ref = self.build_virtual_machine(instance, [ 641.204380] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 641.204380] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] vif_infos = vmwarevif.get_vif_info(self._session, [ 641.204380] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 641.204742] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] for vif in network_info: [ 641.204742] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 641.204742] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] return self._sync_wrapper(fn, *args, **kwargs) [ 641.204742] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 641.204742] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] self.wait() [ 641.204742] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 641.204742] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] self[:] = self._gt.wait() [ 641.204742] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 641.204742] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] return self._exit_event.wait() [ 641.204742] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 641.204742] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] current.throw(*self._exc) [ 641.204742] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
641.204742] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] result = function(*args, **kwargs) [ 641.205222] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 641.205222] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] return func(*args, **kwargs) [ 641.205222] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 641.205222] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] raise e [ 641.205222] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 641.205222] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] nwinfo = self.network_api.allocate_for_instance( [ 641.205222] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 641.205222] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] created_port_ids = self._update_ports_for_instance( [ 641.205222] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 641.205222] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] with excutils.save_and_reraise_exception(): [ 641.205222] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 641.205222] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] self.force_reraise() [ 641.205222] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 641.205553] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] raise self.value [ 641.205553] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 641.205553] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] updated_port = self._update_port( [ 641.205553] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 641.205553] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] _ensure_no_port_binding_failure(port) [ 641.205553] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 641.205553] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] raise exception.PortBindingFailed(port_id=port['id']) [ 641.205553] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] nova.exception.PortBindingFailed: Binding failed for 
port 5013d56d-b041-4790-9c2f-7f0c6d71ec30, please check neutron logs for more information. [ 641.205553] env[63345]: ERROR nova.compute.manager [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] [ 641.205553] env[63345]: DEBUG nova.compute.utils [None req-40c46a23-51d1-4974-99d7-63d301a98173 tempest-ServerActionsTestJSON-1881851479 tempest-ServerActionsTestJSON-1881851479-project-member] [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] Binding failed for port 5013d56d-b041-4790-9c2f-7f0c6d71ec30, please check neutron logs for more information. {{(pid=63345) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 641.207046] env[63345]: DEBUG nova.compute.manager [None req-40c46a23-51d1-4974-99d7-63d301a98173 tempest-ServerActionsTestJSON-1881851479 tempest-ServerActionsTestJSON-1881851479-project-member] [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] Build of instance fb2cdca8-441c-4edb-be11-6b89c19b3cad was re-scheduled: Binding failed for port 5013d56d-b041-4790-9c2f-7f0c6d71ec30, please check neutron logs for more information. {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 641.207586] env[63345]: DEBUG nova.compute.manager [None req-40c46a23-51d1-4974-99d7-63d301a98173 tempest-ServerActionsTestJSON-1881851479 tempest-ServerActionsTestJSON-1881851479-project-member] [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] Unplugging VIFs for instance {{(pid=63345) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 641.207831] env[63345]: DEBUG oslo_concurrency.lockutils [None req-40c46a23-51d1-4974-99d7-63d301a98173 tempest-ServerActionsTestJSON-1881851479 tempest-ServerActionsTestJSON-1881851479-project-member] Acquiring lock "refresh_cache-fb2cdca8-441c-4edb-be11-6b89c19b3cad" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 641.208838] env[63345]: DEBUG oslo_concurrency.lockutils [None req-40c46a23-51d1-4974-99d7-63d301a98173 tempest-ServerActionsTestJSON-1881851479 tempest-ServerActionsTestJSON-1881851479-project-member] Acquired lock "refresh_cache-fb2cdca8-441c-4edb-be11-6b89c19b3cad" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 641.209055] env[63345]: DEBUG nova.network.neutron [None req-40c46a23-51d1-4974-99d7-63d301a98173 tempest-ServerActionsTestJSON-1881851479 tempest-ServerActionsTestJSON-1881851479-project-member] [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 641.213554] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a2c11686-f3e2-427b-a111-6c510c529d42 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 17.588s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 641.224813] env[63345]: DEBUG nova.network.neutron [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: b4a7d6dd-98dc-49d8-b344-1878cd5a3f51] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 641.277457] env[63345]: INFO nova.compute.manager [None req-28a9f8ee-561e-42c1-a81b-2f1cf60def7e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 35a5bd72-403b-467b-ad52-1a1bf4958dbb] Took 1.03 seconds to deallocate network for instance. [ 641.458013] env[63345]: DEBUG nova.network.neutron [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: b4a7d6dd-98dc-49d8-b344-1878cd5a3f51] Updating instance_info_cache with network_info: [{"id": "1634e3f5-396d-4cf4-a5e2-e985d04c1391", "address": "fa:16:3e:71:a2:39", "network": {"id": "e1d0fcb4-77e3-4c6a-96ea-178593ec00ef", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1852131446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "572be07120b44a488924d794c7db100c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae70d41-6ebf-472a-8504-6530eb37ea41", "external-id": "nsx-vlan-transportzone-576", "segmentation_id": 576, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1634e3f5-39", "ovs_interfaceid": "1634e3f5-396d-4cf4-a5e2-e985d04c1391", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 641.746150] env[63345]: DEBUG nova.network.neutron [None req-40c46a23-51d1-4974-99d7-63d301a98173 tempest-ServerActionsTestJSON-1881851479 tempest-ServerActionsTestJSON-1881851479-project-member] [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 641.852252] env[63345]: DEBUG nova.network.neutron [None req-40c46a23-51d1-4974-99d7-63d301a98173 tempest-ServerActionsTestJSON-1881851479 tempest-ServerActionsTestJSON-1881851479-project-member] [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 641.960102] env[63345]: DEBUG oslo_concurrency.lockutils [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Releasing lock "refresh_cache-b4a7d6dd-98dc-49d8-b344-1878cd5a3f51" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 641.960448] env[63345]: DEBUG nova.compute.manager [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: b4a7d6dd-98dc-49d8-b344-1878cd5a3f51] Instance network_info: |[{"id": "1634e3f5-396d-4cf4-a5e2-e985d04c1391", "address": "fa:16:3e:71:a2:39", "network": {"id": "e1d0fcb4-77e3-4c6a-96ea-178593ec00ef", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1852131446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "572be07120b44a488924d794c7db100c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae70d41-6ebf-472a-8504-6530eb37ea41", "external-id": "nsx-vlan-transportzone-576", "segmentation_id": 576, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1634e3f5-39", "ovs_interfaceid": "1634e3f5-396d-4cf4-a5e2-e985d04c1391", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 641.960876] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: b4a7d6dd-98dc-49d8-b344-1878cd5a3f51] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:71:a2:39', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cae70d41-6ebf-472a-8504-6530eb37ea41', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1634e3f5-396d-4cf4-a5e2-e985d04c1391', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 641.971488] env[63345]: DEBUG oslo.service.loopingcall [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 641.971831] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b4a7d6dd-98dc-49d8-b344-1878cd5a3f51] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 641.972296] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2c1069cd-b05f-461a-aabf-b94adb355774 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.997555] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 641.997555] env[63345]: value = "task-1016700" [ 641.997555] env[63345]: _type = "Task" [ 641.997555] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.010964] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016700, 'name': CreateVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.200547] env[63345]: DEBUG nova.compute.manager [req-40f4546c-cb09-4d3e-ac9b-d2d032e3f402 req-081a9fa1-ae14-4c53-99fd-c0faf20c6f6d service nova] [instance: b4a7d6dd-98dc-49d8-b344-1878cd5a3f51] Received event network-changed-1634e3f5-396d-4cf4-a5e2-e985d04c1391 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 642.200547] env[63345]: DEBUG nova.compute.manager [req-40f4546c-cb09-4d3e-ac9b-d2d032e3f402 req-081a9fa1-ae14-4c53-99fd-c0faf20c6f6d service nova] [instance: b4a7d6dd-98dc-49d8-b344-1878cd5a3f51] Refreshing instance network info cache due to event network-changed-1634e3f5-396d-4cf4-a5e2-e985d04c1391. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 642.200547] env[63345]: DEBUG oslo_concurrency.lockutils [req-40f4546c-cb09-4d3e-ac9b-d2d032e3f402 req-081a9fa1-ae14-4c53-99fd-c0faf20c6f6d service nova] Acquiring lock "refresh_cache-b4a7d6dd-98dc-49d8-b344-1878cd5a3f51" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 642.200547] env[63345]: DEBUG oslo_concurrency.lockutils [req-40f4546c-cb09-4d3e-ac9b-d2d032e3f402 req-081a9fa1-ae14-4c53-99fd-c0faf20c6f6d service nova] Acquired lock "refresh_cache-b4a7d6dd-98dc-49d8-b344-1878cd5a3f51" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 642.200547] env[63345]: DEBUG nova.network.neutron [req-40f4546c-cb09-4d3e-ac9b-d2d032e3f402 req-081a9fa1-ae14-4c53-99fd-c0faf20c6f6d service nova] [instance: b4a7d6dd-98dc-49d8-b344-1878cd5a3f51] Refreshing network info cache for port 1634e3f5-396d-4cf4-a5e2-e985d04c1391 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 642.208905] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59c6a986-dd11-4105-b34f-57a67f752f2a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.218455] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8571c9c-7954-4ff4-bbef-9322dbc34699 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.250063] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-1420d8ec-a7b3-486e-95f5-a5428b5572b2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.258449] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-677c5bc1-a73e-4c2a-a9f3-bfeb4c38b0af {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.271551] env[63345]: DEBUG nova.compute.provider_tree [None req-a2c11686-f3e2-427b-a111-6c510c529d42 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 642.316355] env[63345]: INFO nova.scheduler.client.report [None req-28a9f8ee-561e-42c1-a81b-2f1cf60def7e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Deleted allocations for instance 35a5bd72-403b-467b-ad52-1a1bf4958dbb [ 642.360337] env[63345]: DEBUG oslo_concurrency.lockutils [None req-40c46a23-51d1-4974-99d7-63d301a98173 tempest-ServerActionsTestJSON-1881851479 tempest-ServerActionsTestJSON-1881851479-project-member] Releasing lock "refresh_cache-fb2cdca8-441c-4edb-be11-6b89c19b3cad" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 642.361576] env[63345]: DEBUG nova.compute.manager [None req-40c46a23-51d1-4974-99d7-63d301a98173 tempest-ServerActionsTestJSON-1881851479 tempest-ServerActionsTestJSON-1881851479-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=63345) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 642.361972] env[63345]: DEBUG nova.compute.manager [None req-40c46a23-51d1-4974-99d7-63d301a98173 tempest-ServerActionsTestJSON-1881851479 tempest-ServerActionsTestJSON-1881851479-project-member] [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 642.362179] env[63345]: DEBUG nova.network.neutron [None req-40c46a23-51d1-4974-99d7-63d301a98173 tempest-ServerActionsTestJSON-1881851479 tempest-ServerActionsTestJSON-1881851479-project-member] [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 642.393953] env[63345]: DEBUG oslo_concurrency.lockutils [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Acquiring lock "93112cc1-f9a1-4188-9555-bddf483426a1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 642.394211] env[63345]: DEBUG oslo_concurrency.lockutils [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Lock "93112cc1-f9a1-4188-9555-bddf483426a1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 642.401871] env[63345]: DEBUG 
nova.network.neutron [None req-40c46a23-51d1-4974-99d7-63d301a98173 tempest-ServerActionsTestJSON-1881851479 tempest-ServerActionsTestJSON-1881851479-project-member] [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 642.508935] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016700, 'name': CreateVM_Task, 'duration_secs': 0.341953} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.508935] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b4a7d6dd-98dc-49d8-b344-1878cd5a3f51] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 642.510214] env[63345]: DEBUG oslo_concurrency.lockutils [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 642.511234] env[63345]: DEBUG oslo_concurrency.lockutils [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 642.511234] env[63345]: DEBUG oslo_concurrency.lockutils [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 642.511234] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-83238c84-3fb2-45d7-b6d8-5f4229737ce4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.516124] env[63345]: DEBUG oslo_vmware.api [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Waiting for the task: (returnval){ [ 642.516124] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52d03e7f-9128-5c96-d4ca-8c642f7fc9ae" [ 642.516124] env[63345]: _type = "Task" [ 642.516124] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.523940] env[63345]: DEBUG oslo_vmware.api [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52d03e7f-9128-5c96-d4ca-8c642f7fc9ae, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.774785] env[63345]: DEBUG nova.scheduler.client.report [None req-a2c11686-f3e2-427b-a111-6c510c529d42 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 642.828645] env[63345]: DEBUG oslo_concurrency.lockutils [None req-28a9f8ee-561e-42c1-a81b-2f1cf60def7e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Lock "35a5bd72-403b-467b-ad52-1a1bf4958dbb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 92.325s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 642.907946] env[63345]: DEBUG nova.network.neutron [None req-40c46a23-51d1-4974-99d7-63d301a98173 tempest-ServerActionsTestJSON-1881851479 tempest-ServerActionsTestJSON-1881851479-project-member] [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 642.958435] env[63345]: DEBUG nova.network.neutron [req-40f4546c-cb09-4d3e-ac9b-d2d032e3f402 req-081a9fa1-ae14-4c53-99fd-c0faf20c6f6d service nova] [instance: b4a7d6dd-98dc-49d8-b344-1878cd5a3f51] Updated VIF entry in instance network info cache for port 1634e3f5-396d-4cf4-a5e2-e985d04c1391. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 642.958782] env[63345]: DEBUG nova.network.neutron [req-40f4546c-cb09-4d3e-ac9b-d2d032e3f402 req-081a9fa1-ae14-4c53-99fd-c0faf20c6f6d service nova] [instance: b4a7d6dd-98dc-49d8-b344-1878cd5a3f51] Updating instance_info_cache with network_info: [{"id": "1634e3f5-396d-4cf4-a5e2-e985d04c1391", "address": "fa:16:3e:71:a2:39", "network": {"id": "e1d0fcb4-77e3-4c6a-96ea-178593ec00ef", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1852131446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "572be07120b44a488924d794c7db100c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae70d41-6ebf-472a-8504-6530eb37ea41", "external-id": "nsx-vlan-transportzone-576", "segmentation_id": 576, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1634e3f5-39", "ovs_interfaceid": "1634e3f5-396d-4cf4-a5e2-e985d04c1391", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 643.028156] env[63345]: DEBUG oslo_vmware.api [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52d03e7f-9128-5c96-d4ca-8c642f7fc9ae, 'name': SearchDatastore_Task, 'duration_secs': 0.00976} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 643.028477] env[63345]: DEBUG oslo_concurrency.lockutils [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 643.028716] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: b4a7d6dd-98dc-49d8-b344-1878cd5a3f51] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 643.028970] env[63345]: DEBUG oslo_concurrency.lockutils [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 643.029144] env[63345]: DEBUG oslo_concurrency.lockutils [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 643.029314] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 643.029656] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6726b7f3-5750-48cd-93fd-decac328640a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.038388] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 643.038593] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 643.039328] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7d6b9ce8-00bc-4863-8d70-e4b0d0089e03 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.046623] env[63345]: DEBUG oslo_vmware.api [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Waiting for the task: (returnval){ [ 643.046623] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]5229e87f-d6c6-2f41-51af-e879117ce249" [ 643.046623] env[63345]: _type = "Task" [ 643.046623] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 643.054949] env[63345]: DEBUG oslo_vmware.api [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5229e87f-d6c6-2f41-51af-e879117ce249, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.281904] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a2c11686-f3e2-427b-a111-6c510c529d42 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.068s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 643.282555] env[63345]: ERROR nova.compute.manager [None req-a2c11686-f3e2-427b-a111-6c510c529d42 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 3d199751-8956-48ca-b152-f05509099c33, please check neutron logs for more information. 
[ 643.282555] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] Traceback (most recent call last): [ 643.282555] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 643.282555] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] self.driver.spawn(context, instance, image_meta, [ 643.282555] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 643.282555] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 643.282555] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 643.282555] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] vm_ref = self.build_virtual_machine(instance, [ 643.282555] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 643.282555] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] vif_infos = vmwarevif.get_vif_info(self._session, [ 643.282555] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 643.282976] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] for vif in network_info: [ 643.282976] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 643.282976] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] return self._sync_wrapper(fn, *args, **kwargs) [ 643.282976] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 643.282976] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] self.wait() [ 643.282976] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 643.282976] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] self[:] = self._gt.wait() [ 643.282976] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 643.282976] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] return self._exit_event.wait() [ 643.282976] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 643.282976] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] current.throw(*self._exc) [ 643.282976] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
643.282976] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] result = function(*args, **kwargs) [ 643.283406] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 643.283406] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] return func(*args, **kwargs) [ 643.283406] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 643.283406] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] raise e [ 643.283406] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 643.283406] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] nwinfo = self.network_api.allocate_for_instance( [ 643.283406] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 643.283406] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] created_port_ids = self._update_ports_for_instance( [ 643.283406] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 643.283406] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] with excutils.save_and_reraise_exception(): [ 643.283406] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 643.283406] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] self.force_reraise() [ 643.283406] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 643.283825] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] raise self.value [ 643.283825] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 643.283825] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] updated_port = self._update_port( [ 643.283825] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 643.283825] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] _ensure_no_port_binding_failure(port) [ 643.283825] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 643.283825] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] raise exception.PortBindingFailed(port_id=port['id']) [ 643.283825] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] nova.exception.PortBindingFailed: Binding failed for 
port 3d199751-8956-48ca-b152-f05509099c33, please check neutron logs for more information. [ 643.283825] env[63345]: ERROR nova.compute.manager [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] [ 643.283825] env[63345]: DEBUG nova.compute.utils [None req-a2c11686-f3e2-427b-a111-6c510c529d42 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] Binding failed for port 3d199751-8956-48ca-b152-f05509099c33, please check neutron logs for more information. {{(pid=63345) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 643.284617] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 18.319s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 643.284777] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 643.284931] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63345) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 643.285226] env[63345]: DEBUG oslo_concurrency.lockutils [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.091s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 643.287052] env[63345]: INFO nova.compute.claims [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: bc9d2e6a-f77a-4a21-90bc-81949cbfce91] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 643.289883] env[63345]: DEBUG nova.compute.manager [None req-a2c11686-f3e2-427b-a111-6c510c529d42 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] Build of instance 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd was re-scheduled: Binding failed for port 3d199751-8956-48ca-b152-f05509099c33, please check neutron logs for more information. 
{{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 643.290327] env[63345]: DEBUG nova.compute.manager [None req-a2c11686-f3e2-427b-a111-6c510c529d42 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] Unplugging VIFs for instance {{(pid=63345) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 643.290616] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a2c11686-f3e2-427b-a111-6c510c529d42 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] Acquiring lock "refresh_cache-56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 643.290771] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a2c11686-f3e2-427b-a111-6c510c529d42 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] Acquired lock "refresh_cache-56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 643.290929] env[63345]: DEBUG nova.network.neutron [None req-a2c11686-f3e2-427b-a111-6c510c529d42 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 643.292900] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bb6a96c-849d-4ce1-8773-7c33472b8d4b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.301547] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7f388ba-6a58-43a3-8c20-5269cf6c6091 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.316979] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f90c7af-5dc4-407a-acb6-79fb72086380 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.324164] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d506005-1a89-42c2-a584-70fba9ba2cbf {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.356957] env[63345]: DEBUG nova.compute.manager [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] Starting instance... 
{{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 643.360317] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181551MB free_disk=187GB free_vcpus=48 pci_devices=None {{(pid=63345) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 643.360485] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 643.410777] env[63345]: INFO nova.compute.manager [None req-40c46a23-51d1-4974-99d7-63d301a98173 tempest-ServerActionsTestJSON-1881851479 tempest-ServerActionsTestJSON-1881851479-project-member] [instance: fb2cdca8-441c-4edb-be11-6b89c19b3cad] Took 1.05 seconds to deallocate network for instance. [ 643.461246] env[63345]: DEBUG oslo_concurrency.lockutils [req-40f4546c-cb09-4d3e-ac9b-d2d032e3f402 req-081a9fa1-ae14-4c53-99fd-c0faf20c6f6d service nova] Releasing lock "refresh_cache-b4a7d6dd-98dc-49d8-b344-1878cd5a3f51" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 643.557392] env[63345]: DEBUG oslo_vmware.api [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5229e87f-d6c6-2f41-51af-e879117ce249, 'name': SearchDatastore_Task, 'duration_secs': 0.009027} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 643.557872] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ae22e76b-bcb1-4269-bfe9-efe389a3f223 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.563445] env[63345]: DEBUG oslo_vmware.api [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Waiting for the task: (returnval){ [ 643.563445] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52ff2edd-400d-310c-5535-5a1e318e95b0" [ 643.563445] env[63345]: _type = "Task" [ 643.563445] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 643.572184] env[63345]: DEBUG oslo_vmware.api [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52ff2edd-400d-310c-5535-5a1e318e95b0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.817366] env[63345]: DEBUG nova.network.neutron [None req-a2c11686-f3e2-427b-a111-6c510c529d42 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 643.885633] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 643.929235] env[63345]: DEBUG nova.network.neutron [None req-a2c11686-f3e2-427b-a111-6c510c529d42 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 644.078410] env[63345]: DEBUG oslo_vmware.api [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52ff2edd-400d-310c-5535-5a1e318e95b0, 'name': SearchDatastore_Task, 'duration_secs': 0.010066} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 644.078689] env[63345]: DEBUG oslo_concurrency.lockutils [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 644.078950] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] b4a7d6dd-98dc-49d8-b344-1878cd5a3f51/b4a7d6dd-98dc-49d8-b344-1878cd5a3f51.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 644.079219] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6031da44-fd5d-496a-a835-0efe4a8d2f56 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.087786] env[63345]: DEBUG oslo_vmware.api [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Waiting for the task: (returnval){ [ 644.087786] env[63345]: value = "task-1016701" [ 644.087786] env[63345]: _type = "Task" [ 644.087786] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.096833] env[63345]: DEBUG oslo_vmware.api [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Task: {'id': task-1016701, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.433306] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a2c11686-f3e2-427b-a111-6c510c529d42 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] Releasing lock "refresh_cache-56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 644.433580] env[63345]: DEBUG nova.compute.manager [None req-a2c11686-f3e2-427b-a111-6c510c529d42 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=63345) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 644.433780] env[63345]: DEBUG nova.compute.manager [None req-a2c11686-f3e2-427b-a111-6c510c529d42 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 644.433987] env[63345]: DEBUG nova.network.neutron [None req-a2c11686-f3e2-427b-a111-6c510c529d42 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 644.447334] env[63345]: INFO nova.scheduler.client.report [None req-40c46a23-51d1-4974-99d7-63d301a98173 tempest-ServerActionsTestJSON-1881851479 tempest-ServerActionsTestJSON-1881851479-project-member] Deleted allocations for instance fb2cdca8-441c-4edb-be11-6b89c19b3cad [ 644.459977] env[63345]: DEBUG nova.network.neutron [None req-a2c11686-f3e2-427b-a111-6c510c529d42 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 644.603363] env[63345]: DEBUG oslo_vmware.api [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Task: {'id': task-1016701, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.503719} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 644.604959] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] b4a7d6dd-98dc-49d8-b344-1878cd5a3f51/b4a7d6dd-98dc-49d8-b344-1878cd5a3f51.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 644.605209] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: b4a7d6dd-98dc-49d8-b344-1878cd5a3f51] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 644.605637] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-71eb7fe6-7f90-47c7-8e41-e0dd974a1cfd {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.615784] env[63345]: DEBUG oslo_vmware.api [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Waiting for the task: (returnval){ [ 644.615784] env[63345]: value = "task-1016702" [ 644.615784] env[63345]: _type = "Task" [ 644.615784] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.630832] env[63345]: DEBUG oslo_vmware.api [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Task: {'id': task-1016702, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.770961] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9dd3fa7-2f99-4d74-a256-40b23ae542de {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.778725] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fa0c884-b10d-43a9-ad78-632bc2623160 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.809941] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1d85097-72eb-4624-91b3-90c1ddf0a4e4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.818112] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e41e4f5c-0ce8-4739-9ac1-3edcf3dbd635 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.835204] env[63345]: DEBUG nova.compute.provider_tree [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 644.960897] env[63345]: DEBUG oslo_concurrency.lockutils [None req-40c46a23-51d1-4974-99d7-63d301a98173 tempest-ServerActionsTestJSON-1881851479 tempest-ServerActionsTestJSON-1881851479-project-member] Lock "fb2cdca8-441c-4edb-be11-6b89c19b3cad" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 92.878s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 644.961223] env[63345]: DEBUG nova.network.neutron [None req-a2c11686-f3e2-427b-a111-6c510c529d42 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 645.125614] env[63345]: DEBUG oslo_vmware.api [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Task: {'id': task-1016702, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064725} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 645.125614] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: b4a7d6dd-98dc-49d8-b344-1878cd5a3f51] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 645.125614] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-413dd30c-bf08-4077-a16f-2ac8b4412f37 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.151044] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: b4a7d6dd-98dc-49d8-b344-1878cd5a3f51] Reconfiguring VM instance instance-0000001b to attach disk [datastore2] b4a7d6dd-98dc-49d8-b344-1878cd5a3f51/b4a7d6dd-98dc-49d8-b344-1878cd5a3f51.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 645.151044] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-96056fc7-56d8-40ac-84c2-f3054f92a44d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.173794] env[63345]: DEBUG oslo_vmware.api [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Waiting for the task: (returnval){ [ 645.173794] env[63345]: value = "task-1016703" [ 645.173794] env[63345]: _type = "Task" [ 645.173794] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 645.185825] env[63345]: DEBUG oslo_vmware.api [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Task: {'id': task-1016703, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.329758] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquiring lock "e3d52cbd-e768-4425-b83e-180a6e58fd00" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 645.330140] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Lock "e3d52cbd-e768-4425-b83e-180a6e58fd00" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 645.338669] env[63345]: DEBUG nova.scheduler.client.report [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 645.465122] env[63345]: INFO nova.compute.manager [None req-a2c11686-f3e2-427b-a111-6c510c529d42 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] [instance: 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd] Took 1.03 seconds to deallocate network for instance. [ 645.469035] env[63345]: DEBUG nova.compute.manager [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 3e4e58bd-903b-4b3d-8be4-5678aab6c721] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 645.683507] env[63345]: DEBUG oslo_vmware.api [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Task: {'id': task-1016703, 'name': ReconfigVM_Task, 'duration_secs': 0.298424} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 645.683801] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: b4a7d6dd-98dc-49d8-b344-1878cd5a3f51] Reconfigured VM instance instance-0000001b to attach disk [datastore2] b4a7d6dd-98dc-49d8-b344-1878cd5a3f51/b4a7d6dd-98dc-49d8-b344-1878cd5a3f51.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 645.684427] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8ebcec4e-0584-47a7-b555-17c1638459e0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.692079] env[63345]: DEBUG oslo_vmware.api [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Waiting for the task: (returnval){ [ 645.692079] env[63345]: value = "task-1016704" [ 645.692079] env[63345]: _type = "Task" [ 645.692079] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 645.700734] env[63345]: DEBUG oslo_vmware.api [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Task: {'id': task-1016704, 'name': Rename_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.848069] env[63345]: DEBUG oslo_concurrency.lockutils [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.563s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 645.848609] env[63345]: DEBUG nova.compute.manager [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: bc9d2e6a-f77a-4a21-90bc-81949cbfce91] Start building networks asynchronously for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 645.856594] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.468s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 645.862310] env[63345]: INFO nova.compute.claims [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] [instance: 04fd7aaa-658d-480d-8465-825f120477bc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 645.999182] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 646.204754] env[63345]: DEBUG oslo_vmware.api [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Task: {'id': task-1016704, 'name': Rename_Task, 'duration_secs': 0.477291} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 646.205012] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: b4a7d6dd-98dc-49d8-b344-1878cd5a3f51] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 646.206575] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f7e24d76-3bb8-4576-9568-f26161184e8c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.214703] env[63345]: DEBUG oslo_vmware.api [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Waiting for the task: (returnval){ [ 646.214703] env[63345]: value = "task-1016705" [ 646.214703] env[63345]: _type = "Task" [ 646.214703] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 646.225181] env[63345]: DEBUG oslo_vmware.api [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Task: {'id': task-1016705, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.358879] env[63345]: DEBUG nova.compute.utils [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 646.360585] env[63345]: DEBUG nova.compute.manager [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: bc9d2e6a-f77a-4a21-90bc-81949cbfce91] Allocating IP information in the background. {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 646.360696] env[63345]: DEBUG nova.network.neutron [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: bc9d2e6a-f77a-4a21-90bc-81949cbfce91] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 646.431402] env[63345]: DEBUG nova.policy [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '56441dfd40fa467da376ce828d48f331', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'be4b8982dd144c969cb530f52ed9297b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 646.508393] env[63345]: INFO nova.scheduler.client.report [None req-a2c11686-f3e2-427b-a111-6c510c529d42 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] Deleted allocations for instance 56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd [ 646.726193] env[63345]: DEBUG oslo_vmware.api [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Task: {'id': task-1016705, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.863637] env[63345]: DEBUG nova.compute.manager [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: bc9d2e6a-f77a-4a21-90bc-81949cbfce91] Start building block device mappings for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 647.021046] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a2c11686-f3e2-427b-a111-6c510c529d42 tempest-DeleteServersAdminTestJSON-132821844 tempest-DeleteServersAdminTestJSON-132821844-project-member] Lock "56fa1d2e-9883-4ccf-8f74-9c66ceadb8cd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 94.053s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 647.088895] env[63345]: DEBUG nova.network.neutron [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: bc9d2e6a-f77a-4a21-90bc-81949cbfce91] Successfully created port: 35f00929-4dc1-4515-b0de-19a6377c68ca {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 647.227234] env[63345]: DEBUG oslo_vmware.api [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Task: {'id': task-1016705, 'name': PowerOnVM_Task, 'duration_secs': 0.540197} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 647.227450] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: b4a7d6dd-98dc-49d8-b344-1878cd5a3f51] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 647.227604] env[63345]: INFO nova.compute.manager [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: b4a7d6dd-98dc-49d8-b344-1878cd5a3f51] Took 8.22 seconds to spawn the instance on the hypervisor. 
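The spawn sequence recorded above (SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) follows one oslo.vmware pattern throughout: invoke an asynchronous vSphere method, receive a Task moref back, and poll it until it completes, which is where the repeated "progress is N% ... completed successfully" records come from. The short sketch below illustrates that call pattern outside of Nova; the vCenter endpoint, credentials and VM moref value are made-up placeholders, not values taken from this log, and running it assumes a reachable vCenter.

    # Minimal sketch (not from this log) of the oslo.vmware invoke-then-poll
    # pattern shown in the entries above. Endpoint, credentials and the VM
    # moref are hypothetical placeholders.
    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    # Opening the session logs in to vCenter, as in the _create_session
    # records at the start of this log.
    session = vmware_api.VMwareAPISession(
        'vc.example.test', 'user', 'secret',        # placeholder vCenter/creds
        api_retry_count=10, task_poll_interval=0.5)  # poll interval in seconds

    # Placeholder managed object reference for an existing VM; Nova resolves
    # the real moref from the instance UUID instead.
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

    # The *_Task methods return immediately with a Task reference...
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

    # ...and wait_for_task() polls it (the "progress is 0% / 100%" lines),
    # raising if the task ends in an error state and returning the task info
    # on success.
    task_info = session.wait_for_task(task)
    print(task_info.state, getattr(task_info, 'result', None))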
[ 647.227786] env[63345]: DEBUG nova.compute.manager [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: b4a7d6dd-98dc-49d8-b344-1878cd5a3f51] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 647.230845] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a35a1c86-e117-4d9e-8533-0df46285c412 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.348036] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46a9f41f-eb37-4880-bf96-d37d5567a9c8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.356072] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a007aa37-54a9-4614-bf87-08ee4271ba55 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.389472] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a51d64c6-d363-4ee0-bc58-bb8387e0966b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.398378] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8339875-0738-4131-bd9f-339eafb2b64b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.414982] env[63345]: DEBUG nova.compute.provider_tree [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 647.523362] env[63345]: DEBUG nova.compute.manager [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: e6bc8cb9-2f1a-49cb-974d-ea9a211126ee] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 647.749242] env[63345]: INFO nova.compute.manager [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: b4a7d6dd-98dc-49d8-b344-1878cd5a3f51] Took 34.32 seconds to build instance. [ 647.894240] env[63345]: DEBUG nova.compute.manager [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: bc9d2e6a-f77a-4a21-90bc-81949cbfce91] Start spawning the instance on the hypervisor. 
{{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 647.917955] env[63345]: DEBUG nova.scheduler.client.report [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 647.928975] env[63345]: DEBUG nova.virt.hardware [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 647.929257] env[63345]: DEBUG nova.virt.hardware [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 647.929417] env[63345]: DEBUG nova.virt.hardware [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 647.929627] env[63345]: DEBUG nova.virt.hardware [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 647.929784] env[63345]: DEBUG nova.virt.hardware [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 647.929935] env[63345]: DEBUG nova.virt.hardware [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 647.930155] 
env[63345]: DEBUG nova.virt.hardware [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 647.930318] env[63345]: DEBUG nova.virt.hardware [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 647.930495] env[63345]: DEBUG nova.virt.hardware [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 647.930682] env[63345]: DEBUG nova.virt.hardware [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 647.930858] env[63345]: DEBUG nova.virt.hardware [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 647.931743] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ef54cda-1af4-4bbd-8d43-9599cc7dca89 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.941258] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-654caf0a-53d3-4470-8930-3c154068d6b7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.042436] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 648.252172] env[63345]: DEBUG oslo_concurrency.lockutils [None req-62207973-440a-4a9b-9375-fd9ca3ce79cf tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Lock "b4a7d6dd-98dc-49d8-b344-1878cd5a3f51" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 94.022s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 648.424962] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.568s {{(pid=63345) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 648.425504] env[63345]: DEBUG nova.compute.manager [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] [instance: 04fd7aaa-658d-480d-8465-825f120477bc] Start building networks asynchronously for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 648.428149] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 648.429711] env[63345]: INFO nova.compute.claims [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] [instance: 46d3332a-bfb9-4812-8201-a87467ce5151] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 648.756038] env[63345]: DEBUG nova.compute.manager [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] [instance: 7bef089c-e93b-4ba6-a683-4e076489f92a] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 648.818267] env[63345]: DEBUG nova.compute.manager [req-6b99d38a-7e1e-4c73-947c-8da317658bb1 req-b8c28b8c-fe6a-4f20-a4c0-0328a2aac1ba service nova] [instance: bc9d2e6a-f77a-4a21-90bc-81949cbfce91] Received event network-vif-plugged-35f00929-4dc1-4515-b0de-19a6377c68ca {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 648.818459] env[63345]: DEBUG oslo_concurrency.lockutils [req-6b99d38a-7e1e-4c73-947c-8da317658bb1 req-b8c28b8c-fe6a-4f20-a4c0-0328a2aac1ba service nova] Acquiring lock "bc9d2e6a-f77a-4a21-90bc-81949cbfce91-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 648.818671] env[63345]: DEBUG oslo_concurrency.lockutils [req-6b99d38a-7e1e-4c73-947c-8da317658bb1 req-b8c28b8c-fe6a-4f20-a4c0-0328a2aac1ba service nova] Lock "bc9d2e6a-f77a-4a21-90bc-81949cbfce91-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 648.818843] env[63345]: DEBUG oslo_concurrency.lockutils [req-6b99d38a-7e1e-4c73-947c-8da317658bb1 req-b8c28b8c-fe6a-4f20-a4c0-0328a2aac1ba service nova] Lock "bc9d2e6a-f77a-4a21-90bc-81949cbfce91-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 648.819048] env[63345]: DEBUG nova.compute.manager [req-6b99d38a-7e1e-4c73-947c-8da317658bb1 req-b8c28b8c-fe6a-4f20-a4c0-0328a2aac1ba service nova] [instance: bc9d2e6a-f77a-4a21-90bc-81949cbfce91] No waiting events found dispatching network-vif-plugged-35f00929-4dc1-4515-b0de-19a6377c68ca {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 648.819191] env[63345]: 
WARNING nova.compute.manager [req-6b99d38a-7e1e-4c73-947c-8da317658bb1 req-b8c28b8c-fe6a-4f20-a4c0-0328a2aac1ba service nova] [instance: bc9d2e6a-f77a-4a21-90bc-81949cbfce91] Received unexpected event network-vif-plugged-35f00929-4dc1-4515-b0de-19a6377c68ca for instance with vm_state building and task_state spawning. [ 648.907353] env[63345]: DEBUG nova.network.neutron [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: bc9d2e6a-f77a-4a21-90bc-81949cbfce91] Successfully updated port: 35f00929-4dc1-4515-b0de-19a6377c68ca {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 648.934804] env[63345]: DEBUG nova.compute.utils [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 648.943075] env[63345]: DEBUG nova.compute.manager [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] [instance: 04fd7aaa-658d-480d-8465-825f120477bc] Allocating IP information in the background. {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 648.943313] env[63345]: DEBUG nova.network.neutron [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] [instance: 04fd7aaa-658d-480d-8465-825f120477bc] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 648.990890] env[63345]: DEBUG nova.policy [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f5e4206f50cb41a3a8d89b2141faf0f3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd719045ef1a9467c894e5a62510eb701', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 649.008927] env[63345]: DEBUG oslo_concurrency.lockutils [None req-524ef0bf-1ce4-44cc-b9ef-88a44732ad39 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Acquiring lock "ee31689b-bf0b-4737-86c7-5451c763e603" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 649.008927] env[63345]: DEBUG oslo_concurrency.lockutils [None req-524ef0bf-1ce4-44cc-b9ef-88a44732ad39 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Lock "ee31689b-bf0b-4737-86c7-5451c763e603" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 649.008927] env[63345]: DEBUG oslo_concurrency.lockutils [None req-524ef0bf-1ce4-44cc-b9ef-88a44732ad39 
tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Acquiring lock "ee31689b-bf0b-4737-86c7-5451c763e603-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 649.008927] env[63345]: DEBUG oslo_concurrency.lockutils [None req-524ef0bf-1ce4-44cc-b9ef-88a44732ad39 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Lock "ee31689b-bf0b-4737-86c7-5451c763e603-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 649.009178] env[63345]: DEBUG oslo_concurrency.lockutils [None req-524ef0bf-1ce4-44cc-b9ef-88a44732ad39 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Lock "ee31689b-bf0b-4737-86c7-5451c763e603-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 649.013401] env[63345]: INFO nova.compute.manager [None req-524ef0bf-1ce4-44cc-b9ef-88a44732ad39 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: ee31689b-bf0b-4737-86c7-5451c763e603] Terminating instance [ 649.281411] env[63345]: DEBUG oslo_concurrency.lockutils [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 649.409721] env[63345]: DEBUG oslo_concurrency.lockutils [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Acquiring lock "refresh_cache-bc9d2e6a-f77a-4a21-90bc-81949cbfce91" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 649.409898] env[63345]: DEBUG oslo_concurrency.lockutils [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Acquired lock "refresh_cache-bc9d2e6a-f77a-4a21-90bc-81949cbfce91" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 649.410071] env[63345]: DEBUG nova.network.neutron [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: bc9d2e6a-f77a-4a21-90bc-81949cbfce91] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 649.441341] env[63345]: DEBUG nova.compute.manager [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] [instance: 04fd7aaa-658d-480d-8465-825f120477bc] Start building block device mappings for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 649.521252] env[63345]: DEBUG nova.compute.manager [None req-524ef0bf-1ce4-44cc-b9ef-88a44732ad39 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: ee31689b-bf0b-4737-86c7-5451c763e603] Start destroying the instance on the hypervisor. {{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 649.521472] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-524ef0bf-1ce4-44cc-b9ef-88a44732ad39 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: ee31689b-bf0b-4737-86c7-5451c763e603] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 649.522363] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-670ace6c-cc02-4dc8-83d6-7425f051acb7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.535721] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-524ef0bf-1ce4-44cc-b9ef-88a44732ad39 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: ee31689b-bf0b-4737-86c7-5451c763e603] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 649.535985] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c7c6d61d-b6d9-417d-be6a-9f3bdf70cbe0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.547138] env[63345]: DEBUG oslo_vmware.api [None req-524ef0bf-1ce4-44cc-b9ef-88a44732ad39 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Waiting for the task: (returnval){ [ 649.547138] env[63345]: value = "task-1016706" [ 649.547138] env[63345]: _type = "Task" [ 649.547138] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 649.550666] env[63345]: DEBUG nova.network.neutron [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] [instance: 04fd7aaa-658d-480d-8465-825f120477bc] Successfully created port: d9fdd10f-fc7f-4c46-9538-13164c07d369 {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 649.565217] env[63345]: DEBUG oslo_vmware.api [None req-524ef0bf-1ce4-44cc-b9ef-88a44732ad39 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Task: {'id': task-1016706, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.957215] env[63345]: DEBUG nova.network.neutron [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: bc9d2e6a-f77a-4a21-90bc-81949cbfce91] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 649.964328] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de9b2e15-af03-4fdc-b076-a1922666e929 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.974444] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0ea45fb-f574-432b-a7ca-2e38f01280fe {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.015965] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c3de6ce-ea78-4ece-924e-2275d9f22a68 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.026664] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5106e42c-1b7f-4beb-a4c0-1ee11c02f2f7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.041850] env[63345]: DEBUG nova.compute.provider_tree [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 650.061107] env[63345]: DEBUG oslo_vmware.api [None req-524ef0bf-1ce4-44cc-b9ef-88a44732ad39 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Task: {'id': task-1016706, 'name': PowerOffVM_Task, 'duration_secs': 0.272143} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 650.066560] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-524ef0bf-1ce4-44cc-b9ef-88a44732ad39 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: ee31689b-bf0b-4737-86c7-5451c763e603] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 650.066560] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-524ef0bf-1ce4-44cc-b9ef-88a44732ad39 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: ee31689b-bf0b-4737-86c7-5451c763e603] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 650.066974] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e2e19e84-7f3b-40fc-a83d-ed3ff7eadf72 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.140879] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-524ef0bf-1ce4-44cc-b9ef-88a44732ad39 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: ee31689b-bf0b-4737-86c7-5451c763e603] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 650.141145] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-524ef0bf-1ce4-44cc-b9ef-88a44732ad39 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: ee31689b-bf0b-4737-86c7-5451c763e603] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 650.141315] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-524ef0bf-1ce4-44cc-b9ef-88a44732ad39 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Deleting the datastore file [datastore2] ee31689b-bf0b-4737-86c7-5451c763e603 {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 650.141606] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1a96af24-0a35-4b5b-8d7a-6e6adf22c222 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.151319] env[63345]: DEBUG oslo_vmware.api [None req-524ef0bf-1ce4-44cc-b9ef-88a44732ad39 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Waiting for the task: (returnval){ [ 650.151319] env[63345]: value = "task-1016708" [ 650.151319] env[63345]: _type = "Task" [ 650.151319] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 650.161234] env[63345]: DEBUG oslo_vmware.api [None req-524ef0bf-1ce4-44cc-b9ef-88a44732ad39 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Task: {'id': task-1016708, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.219035] env[63345]: DEBUG nova.network.neutron [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: bc9d2e6a-f77a-4a21-90bc-81949cbfce91] Updating instance_info_cache with network_info: [{"id": "35f00929-4dc1-4515-b0de-19a6377c68ca", "address": "fa:16:3e:9e:c6:f2", "network": {"id": "5159b9e8-dfb2-472c-bec6-f963867f9baf", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-134143484-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be4b8982dd144c969cb530f52ed9297b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31ac3fea-ebf4-4bed-bf70-1eaecdf71280", "external-id": "nsx-vlan-transportzone-489", "segmentation_id": 489, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap35f00929-4d", "ovs_interfaceid": "35f00929-4dc1-4515-b0de-19a6377c68ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 650.455636] env[63345]: DEBUG nova.compute.manager [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] [instance: 04fd7aaa-658d-480d-8465-825f120477bc] Start spawning the instance on the hypervisor. 
{{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 650.507108] env[63345]: DEBUG nova.virt.hardware [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 650.507363] env[63345]: DEBUG nova.virt.hardware [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 650.507538] env[63345]: DEBUG nova.virt.hardware [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 650.507778] env[63345]: DEBUG nova.virt.hardware [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 650.507909] env[63345]: DEBUG nova.virt.hardware [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 650.511074] env[63345]: DEBUG nova.virt.hardware [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 650.511074] env[63345]: DEBUG nova.virt.hardware [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 650.511074] env[63345]: DEBUG nova.virt.hardware [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 650.511074] env[63345]: DEBUG 
nova.virt.hardware [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 650.511074] env[63345]: DEBUG nova.virt.hardware [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 650.511246] env[63345]: DEBUG nova.virt.hardware [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 650.511246] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-882f354b-f6a9-4632-aa21-d4f04438381c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.518365] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcffb943-fd3c-4222-babc-8742db5d6f15 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.544559] env[63345]: DEBUG nova.scheduler.client.report [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 650.664612] env[63345]: DEBUG oslo_vmware.api [None req-524ef0bf-1ce4-44cc-b9ef-88a44732ad39 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Task: {'id': task-1016708, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.145918} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 650.664864] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-524ef0bf-1ce4-44cc-b9ef-88a44732ad39 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 650.666008] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-524ef0bf-1ce4-44cc-b9ef-88a44732ad39 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: ee31689b-bf0b-4737-86c7-5451c763e603] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 650.666207] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-524ef0bf-1ce4-44cc-b9ef-88a44732ad39 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: ee31689b-bf0b-4737-86c7-5451c763e603] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 650.666431] env[63345]: INFO nova.compute.manager [None req-524ef0bf-1ce4-44cc-b9ef-88a44732ad39 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: ee31689b-bf0b-4737-86c7-5451c763e603] Took 1.14 seconds to destroy the instance on the hypervisor. [ 650.666650] env[63345]: DEBUG oslo.service.loopingcall [None req-524ef0bf-1ce4-44cc-b9ef-88a44732ad39 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 650.666818] env[63345]: DEBUG nova.compute.manager [-] [instance: ee31689b-bf0b-4737-86c7-5451c763e603] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 650.666915] env[63345]: DEBUG nova.network.neutron [-] [instance: ee31689b-bf0b-4737-86c7-5451c763e603] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 650.725113] env[63345]: DEBUG oslo_concurrency.lockutils [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Releasing lock "refresh_cache-bc9d2e6a-f77a-4a21-90bc-81949cbfce91" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 650.725544] env[63345]: DEBUG nova.compute.manager [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: bc9d2e6a-f77a-4a21-90bc-81949cbfce91] Instance network_info: |[{"id": "35f00929-4dc1-4515-b0de-19a6377c68ca", "address": "fa:16:3e:9e:c6:f2", "network": {"id": "5159b9e8-dfb2-472c-bec6-f963867f9baf", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-134143484-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be4b8982dd144c969cb530f52ed9297b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31ac3fea-ebf4-4bed-bf70-1eaecdf71280", "external-id": "nsx-vlan-transportzone-489", "segmentation_id": 489, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap35f00929-4d", "ovs_interfaceid": "35f00929-4dc1-4515-b0de-19a6377c68ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 650.725905] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: bc9d2e6a-f77a-4a21-90bc-81949cbfce91] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9e:c6:f2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '31ac3fea-ebf4-4bed-bf70-1eaecdf71280', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '35f00929-4dc1-4515-b0de-19a6377c68ca', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 650.734251] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Creating folder: Project (be4b8982dd144c969cb530f52ed9297b). Parent ref: group-v225918. 
{{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 650.734445] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-01de54b5-a629-4f3e-8431-1230f0d08214 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.749897] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Created folder: Project (be4b8982dd144c969cb530f52ed9297b) in parent group-v225918. [ 650.750256] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Creating folder: Instances. Parent ref: group-v225943. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 650.750339] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5353c5e2-2780-42bc-af0a-ff5c059c2d54 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.760021] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Created folder: Instances in parent group-v225943. [ 650.760339] env[63345]: DEBUG oslo.service.loopingcall [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 650.760454] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bc9d2e6a-f77a-4a21-90bc-81949cbfce91] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 650.760642] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dc080886-47de-474a-bd1f-2bafd07bfcfc {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.782438] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 650.782438] env[63345]: value = "task-1016711" [ 650.782438] env[63345]: _type = "Task" [ 650.782438] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 650.790064] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016711, 'name': CreateVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.844118] env[63345]: DEBUG nova.compute.manager [req-af1f904c-63ff-458e-a545-33644a310ce8 req-a8fcbc5b-747c-41e1-ae8d-d0d735262634 service nova] [instance: bc9d2e6a-f77a-4a21-90bc-81949cbfce91] Received event network-changed-35f00929-4dc1-4515-b0de-19a6377c68ca {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 650.844159] env[63345]: DEBUG nova.compute.manager [req-af1f904c-63ff-458e-a545-33644a310ce8 req-a8fcbc5b-747c-41e1-ae8d-d0d735262634 service nova] [instance: bc9d2e6a-f77a-4a21-90bc-81949cbfce91] Refreshing instance network info cache due to event network-changed-35f00929-4dc1-4515-b0de-19a6377c68ca. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 650.844613] env[63345]: DEBUG oslo_concurrency.lockutils [req-af1f904c-63ff-458e-a545-33644a310ce8 req-a8fcbc5b-747c-41e1-ae8d-d0d735262634 service nova] Acquiring lock "refresh_cache-bc9d2e6a-f77a-4a21-90bc-81949cbfce91" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 650.844613] env[63345]: DEBUG oslo_concurrency.lockutils [req-af1f904c-63ff-458e-a545-33644a310ce8 req-a8fcbc5b-747c-41e1-ae8d-d0d735262634 service nova] Acquired lock "refresh_cache-bc9d2e6a-f77a-4a21-90bc-81949cbfce91" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 650.844726] env[63345]: DEBUG nova.network.neutron [req-af1f904c-63ff-458e-a545-33644a310ce8 req-a8fcbc5b-747c-41e1-ae8d-d0d735262634 service nova] [instance: bc9d2e6a-f77a-4a21-90bc-81949cbfce91] Refreshing network info cache for port 35f00929-4dc1-4515-b0de-19a6377c68ca {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 651.055179] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.624s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 651.055179] env[63345]: DEBUG nova.compute.manager [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] [instance: 46d3332a-bfb9-4812-8201-a87467ce5151] Start building networks asynchronously for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 651.057273] env[63345]: DEBUG oslo_concurrency.lockutils [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.511s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 651.059389] env[63345]: INFO nova.compute.claims [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 651.142195] env[63345]: DEBUG nova.compute.manager [req-327152c9-42b8-4bf6-8467-aa4b15c0746d req-2b1b0cb0-7102-42eb-bffa-055dd99c9b98 service nova] [instance: ee31689b-bf0b-4737-86c7-5451c763e603] Received event network-vif-deleted-8dd82e3c-9961-493e-82fc-5ccb8542af34 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 651.142463] env[63345]: INFO nova.compute.manager [req-327152c9-42b8-4bf6-8467-aa4b15c0746d req-2b1b0cb0-7102-42eb-bffa-055dd99c9b98 service nova] [instance: ee31689b-bf0b-4737-86c7-5451c763e603] Neutron deleted interface 8dd82e3c-9961-493e-82fc-5ccb8542af34; detaching it from the instance and deleting it from the info cache [ 651.143020] env[63345]: DEBUG nova.network.neutron [req-327152c9-42b8-4bf6-8467-aa4b15c0746d req-2b1b0cb0-7102-42eb-bffa-055dd99c9b98 service nova] [instance: ee31689b-bf0b-4737-86c7-5451c763e603] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 651.292979] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016711, 'name': CreateVM_Task, 'duration_secs': 0.351944} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.293336] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bc9d2e6a-f77a-4a21-90bc-81949cbfce91] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 651.294194] env[63345]: DEBUG oslo_concurrency.lockutils [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 651.294445] env[63345]: DEBUG oslo_concurrency.lockutils [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 651.298023] env[63345]: DEBUG oslo_concurrency.lockutils [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 651.298023] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f13dfc44-0e0d-418f-8690-d10c5c216ce6 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.302260] env[63345]: DEBUG oslo_vmware.api [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Waiting for the task: (returnval){ [ 651.302260] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52f683d7-3e59-ff74-a153-6fc3b3bda258" [ 651.302260] env[63345]: _type = "Task" [ 651.302260] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.310703] env[63345]: DEBUG oslo_vmware.api [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52f683d7-3e59-ff74-a153-6fc3b3bda258, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.565277] env[63345]: DEBUG nova.compute.utils [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 651.568923] env[63345]: DEBUG nova.compute.manager [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] [instance: 46d3332a-bfb9-4812-8201-a87467ce5151] Allocating IP information in the background. 
{{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 651.569113] env[63345]: DEBUG nova.network.neutron [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] [instance: 46d3332a-bfb9-4812-8201-a87467ce5151] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 651.609167] env[63345]: DEBUG nova.network.neutron [-] [instance: ee31689b-bf0b-4737-86c7-5451c763e603] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 651.645843] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d6da8dfd-5d4c-4b08-b4be-f917c2cc4183 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.656404] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fda7dd6-30a4-406c-bc4f-a50bd7775154 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.676682] env[63345]: DEBUG nova.policy [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8a82ae00173042a2a363390f0f75ede5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '621255ab615842da94141f5949873177', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 651.686361] env[63345]: DEBUG nova.compute.manager [req-327152c9-42b8-4bf6-8467-aa4b15c0746d req-2b1b0cb0-7102-42eb-bffa-055dd99c9b98 service nova] [instance: ee31689b-bf0b-4737-86c7-5451c763e603] Detach interface failed, port_id=8dd82e3c-9961-493e-82fc-5ccb8542af34, reason: Instance ee31689b-bf0b-4737-86c7-5451c763e603 could not be found. {{(pid=63345) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 651.728363] env[63345]: DEBUG nova.network.neutron [req-af1f904c-63ff-458e-a545-33644a310ce8 req-a8fcbc5b-747c-41e1-ae8d-d0d735262634 service nova] [instance: bc9d2e6a-f77a-4a21-90bc-81949cbfce91] Updated VIF entry in instance network info cache for port 35f00929-4dc1-4515-b0de-19a6377c68ca. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 651.731971] env[63345]: DEBUG nova.network.neutron [req-af1f904c-63ff-458e-a545-33644a310ce8 req-a8fcbc5b-747c-41e1-ae8d-d0d735262634 service nova] [instance: bc9d2e6a-f77a-4a21-90bc-81949cbfce91] Updating instance_info_cache with network_info: [{"id": "35f00929-4dc1-4515-b0de-19a6377c68ca", "address": "fa:16:3e:9e:c6:f2", "network": {"id": "5159b9e8-dfb2-472c-bec6-f963867f9baf", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-134143484-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be4b8982dd144c969cb530f52ed9297b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31ac3fea-ebf4-4bed-bf70-1eaecdf71280", "external-id": "nsx-vlan-transportzone-489", "segmentation_id": 489, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap35f00929-4d", "ovs_interfaceid": "35f00929-4dc1-4515-b0de-19a6377c68ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 651.782498] env[63345]: DEBUG nova.network.neutron [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] [instance: 04fd7aaa-658d-480d-8465-825f120477bc] Successfully updated port: d9fdd10f-fc7f-4c46-9538-13164c07d369 {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 651.815502] env[63345]: DEBUG oslo_vmware.api [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52f683d7-3e59-ff74-a153-6fc3b3bda258, 'name': SearchDatastore_Task, 'duration_secs': 0.0105} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.815815] env[63345]: DEBUG oslo_concurrency.lockutils [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 651.816065] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: bc9d2e6a-f77a-4a21-90bc-81949cbfce91] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 651.816301] env[63345]: DEBUG oslo_concurrency.lockutils [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 651.816451] env[63345]: DEBUG oslo_concurrency.lockutils [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 651.816627] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 651.817186] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-28801d99-721e-410a-8eb4-00cf846e2582 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.826910] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 651.827115] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 651.827844] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5d903fe5-bca4-453a-91cd-737e6b9b9deb {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.834042] env[63345]: DEBUG oslo_vmware.api [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Waiting for the task: (returnval){ [ 651.834042] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]5214e863-42db-7166-0400-9be7b12519f5" [ 651.834042] env[63345]: _type = "Task" [ 651.834042] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.843368] env[63345]: DEBUG oslo_vmware.api [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5214e863-42db-7166-0400-9be7b12519f5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.069968] env[63345]: DEBUG nova.compute.manager [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] [instance: 46d3332a-bfb9-4812-8201-a87467ce5151] Start building block device mappings for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 652.113430] env[63345]: INFO nova.compute.manager [-] [instance: ee31689b-bf0b-4737-86c7-5451c763e603] Took 1.45 seconds to deallocate network for instance. 
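[editor's note] The surrounding entries repeatedly show the same asynchronous pattern from oslo_vmware: a vSphere call such as PowerOffVM_Task, DeleteDatastoreFile_Task, CreateVM_Task, SearchDatastore_Task or CopyVirtualDisk_Task returns a task handle right away ("Waiting for the task: ... value = 'task-1016712'"), and the driver then polls it ("progress is 0%" ... "progress is 89%" ... "completed successfully"). The sketch below illustrates only that polling loop under stated assumptions; the names get_task_info and wait_for_task are hypothetical stand-ins and this is not the actual oslo_vmware.api implementation.

```python
# Minimal sketch of the task-polling pattern seen in the log above.
# Assumption: get_task_info(task_id) is a hypothetical callable returning a
# dict like {'state': 'running'|'success'|'error', 'progress': int, 'error': str}.
import time


class TaskFailed(Exception):
    """Raised when the polled task ends in an error state."""


def wait_for_task(get_task_info, task_id, poll_interval=0.5, timeout=300.0):
    """Poll get_task_info(task_id) until the task succeeds, fails, or times out."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info(task_id)
        state = info.get("state")
        if state == "success":
            return info
        if state == "error":
            raise TaskFailed(info.get("error", "unknown error"))
        # Intermediate polls correspond to the "progress is N%" log lines.
        time.sleep(poll_interval)
    raise TimeoutError(f"task {task_id} did not complete within {timeout}s")
```

The design reason for the handle-plus-poll shape is that these vCenter operations are long-running, so the SOAP call cannot block; the caller keeps a reference to the task and checks its state on an interval, which is why each operation in the log appears as one "Invoking ..._Task" line followed by one or more poll lines.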
[ 652.232692] env[63345]: DEBUG oslo_concurrency.lockutils [req-af1f904c-63ff-458e-a545-33644a310ce8 req-a8fcbc5b-747c-41e1-ae8d-d0d735262634 service nova] Releasing lock "refresh_cache-bc9d2e6a-f77a-4a21-90bc-81949cbfce91" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 652.283182] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Acquiring lock "refresh_cache-04fd7aaa-658d-480d-8465-825f120477bc" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 652.283404] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Acquired lock "refresh_cache-04fd7aaa-658d-480d-8465-825f120477bc" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 652.283618] env[63345]: DEBUG nova.network.neutron [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] [instance: 04fd7aaa-658d-480d-8465-825f120477bc] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 652.351695] env[63345]: DEBUG oslo_vmware.api [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5214e863-42db-7166-0400-9be7b12519f5, 'name': SearchDatastore_Task, 'duration_secs': 0.009764} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 652.354630] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-491b4b1f-45b6-44b3-9478-7fafe89a0a37 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.361255] env[63345]: DEBUG oslo_vmware.api [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Waiting for the task: (returnval){ [ 652.361255] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]5231159c-4edf-5ecc-c040-5f4d6397a0f8" [ 652.361255] env[63345]: _type = "Task" [ 652.361255] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 652.375376] env[63345]: DEBUG oslo_vmware.api [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5231159c-4edf-5ecc-c040-5f4d6397a0f8, 'name': SearchDatastore_Task, 'duration_secs': 0.00968} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 652.375714] env[63345]: DEBUG oslo_concurrency.lockutils [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 652.376059] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] bc9d2e6a-f77a-4a21-90bc-81949cbfce91/bc9d2e6a-f77a-4a21-90bc-81949cbfce91.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 652.376380] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5bb31cdf-a444-4c23-b277-05ce1d85566d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.385905] env[63345]: DEBUG oslo_vmware.api [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Waiting for the task: (returnval){ [ 652.385905] env[63345]: value = "task-1016712" [ 652.385905] env[63345]: _type = "Task" [ 652.385905] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 652.398189] env[63345]: DEBUG oslo_vmware.api [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Task: {'id': task-1016712, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.401664] env[63345]: DEBUG nova.network.neutron [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] [instance: 46d3332a-bfb9-4812-8201-a87467ce5151] Successfully created port: 6fb848af-6632-4cdf-847d-138fe30c4a08 {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 652.602697] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fde42fe-10c6-4cdf-93da-d1f217b92b02 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.611971] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c822be8-63c4-4dda-a01a-97ecbecc78b7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.621420] env[63345]: DEBUG oslo_concurrency.lockutils [None req-524ef0bf-1ce4-44cc-b9ef-88a44732ad39 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 652.654935] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebb84955-7014-4a58-b514-ace31ef4ffcc {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.665016] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8d38e61-1d1f-4b16-9de5-cb320801a949 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.683257] env[63345]: DEBUG nova.compute.provider_tree [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 652.885871] env[63345]: DEBUG nova.network.neutron [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] [instance: 04fd7aaa-658d-480d-8465-825f120477bc] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 652.900647] env[63345]: DEBUG oslo_vmware.api [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Task: {'id': task-1016712, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.082204] env[63345]: DEBUG nova.compute.manager [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] [instance: 46d3332a-bfb9-4812-8201-a87467ce5151] Start spawning the instance on the hypervisor. 
{{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 653.110852] env[63345]: DEBUG nova.virt.hardware [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 653.111151] env[63345]: DEBUG nova.virt.hardware [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 653.111319] env[63345]: DEBUG nova.virt.hardware [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 653.111506] env[63345]: DEBUG nova.virt.hardware [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 653.111654] env[63345]: DEBUG nova.virt.hardware [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 653.111809] env[63345]: DEBUG nova.virt.hardware [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 653.112016] env[63345]: DEBUG nova.virt.hardware [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 653.112848] env[63345]: DEBUG nova.virt.hardware [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 653.113076] env[63345]: DEBUG nova.virt.hardware [None 
req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 653.113259] env[63345]: DEBUG nova.virt.hardware [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 653.113437] env[63345]: DEBUG nova.virt.hardware [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 653.114313] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b9f4100-32ee-47b0-924d-501fae848c0e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.127517] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e303e22f-cc31-4a83-b4be-1a21b35fd5ed {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.186240] env[63345]: DEBUG nova.scheduler.client.report [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 653.191315] env[63345]: DEBUG nova.network.neutron [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] [instance: 04fd7aaa-658d-480d-8465-825f120477bc] Updating instance_info_cache with network_info: [{"id": "d9fdd10f-fc7f-4c46-9538-13164c07d369", "address": "fa:16:3e:f9:7e:36", "network": {"id": "dc71bdbf-a78f-4a08-8536-38e560997d01", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-443964394-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d719045ef1a9467c894e5a62510eb701", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78e1ebb0-0130-446b-bf73-a0e59bbb95cc", "external-id": "nsx-vlan-transportzone-414", "segmentation_id": 414, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd9fdd10f-fc", "ovs_interfaceid": 
"d9fdd10f-fc7f-4c46-9538-13164c07d369", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 653.197104] env[63345]: DEBUG nova.compute.manager [req-ea6b4901-1fd4-4b0f-ad0e-b2ba8579ec69 req-f3572b2a-f773-4131-975d-3972f2cff490 service nova] [instance: 04fd7aaa-658d-480d-8465-825f120477bc] Received event network-vif-plugged-d9fdd10f-fc7f-4c46-9538-13164c07d369 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 653.197338] env[63345]: DEBUG oslo_concurrency.lockutils [req-ea6b4901-1fd4-4b0f-ad0e-b2ba8579ec69 req-f3572b2a-f773-4131-975d-3972f2cff490 service nova] Acquiring lock "04fd7aaa-658d-480d-8465-825f120477bc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 653.197595] env[63345]: DEBUG oslo_concurrency.lockutils [req-ea6b4901-1fd4-4b0f-ad0e-b2ba8579ec69 req-f3572b2a-f773-4131-975d-3972f2cff490 service nova] Lock "04fd7aaa-658d-480d-8465-825f120477bc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 653.197786] env[63345]: DEBUG oslo_concurrency.lockutils [req-ea6b4901-1fd4-4b0f-ad0e-b2ba8579ec69 req-f3572b2a-f773-4131-975d-3972f2cff490 service nova] Lock "04fd7aaa-658d-480d-8465-825f120477bc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 653.197958] env[63345]: DEBUG nova.compute.manager [req-ea6b4901-1fd4-4b0f-ad0e-b2ba8579ec69 req-f3572b2a-f773-4131-975d-3972f2cff490 service nova] [instance: 04fd7aaa-658d-480d-8465-825f120477bc] No waiting events found dispatching network-vif-plugged-d9fdd10f-fc7f-4c46-9538-13164c07d369 {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 653.198143] env[63345]: WARNING nova.compute.manager [req-ea6b4901-1fd4-4b0f-ad0e-b2ba8579ec69 req-f3572b2a-f773-4131-975d-3972f2cff490 service nova] [instance: 04fd7aaa-658d-480d-8465-825f120477bc] Received unexpected event network-vif-plugged-d9fdd10f-fc7f-4c46-9538-13164c07d369 for instance with vm_state building and task_state spawning. [ 653.198305] env[63345]: DEBUG nova.compute.manager [req-ea6b4901-1fd4-4b0f-ad0e-b2ba8579ec69 req-f3572b2a-f773-4131-975d-3972f2cff490 service nova] [instance: 04fd7aaa-658d-480d-8465-825f120477bc] Received event network-changed-d9fdd10f-fc7f-4c46-9538-13164c07d369 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 653.198459] env[63345]: DEBUG nova.compute.manager [req-ea6b4901-1fd4-4b0f-ad0e-b2ba8579ec69 req-f3572b2a-f773-4131-975d-3972f2cff490 service nova] [instance: 04fd7aaa-658d-480d-8465-825f120477bc] Refreshing instance network info cache due to event network-changed-d9fdd10f-fc7f-4c46-9538-13164c07d369. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 653.198628] env[63345]: DEBUG oslo_concurrency.lockutils [req-ea6b4901-1fd4-4b0f-ad0e-b2ba8579ec69 req-f3572b2a-f773-4131-975d-3972f2cff490 service nova] Acquiring lock "refresh_cache-04fd7aaa-658d-480d-8465-825f120477bc" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 653.400952] env[63345]: DEBUG oslo_vmware.api [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Task: {'id': task-1016712, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.529539} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.401511] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] bc9d2e6a-f77a-4a21-90bc-81949cbfce91/bc9d2e6a-f77a-4a21-90bc-81949cbfce91.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 653.401751] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: bc9d2e6a-f77a-4a21-90bc-81949cbfce91] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 653.402114] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-448e888d-6c31-42c8-bf4f-cb20ba0eec09 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.410254] env[63345]: DEBUG oslo_vmware.api [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Waiting for the task: (returnval){ [ 653.410254] env[63345]: value = "task-1016713" [ 653.410254] env[63345]: _type = "Task" [ 653.410254] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.421886] env[63345]: DEBUG oslo_vmware.api [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Task: {'id': task-1016713, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.694232] env[63345]: DEBUG oslo_concurrency.lockutils [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.637s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 653.694778] env[63345]: DEBUG nova.compute.manager [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Start building networks asynchronously for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 653.699069] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Releasing lock "refresh_cache-04fd7aaa-658d-480d-8465-825f120477bc" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 653.699362] env[63345]: DEBUG nova.compute.manager [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] [instance: 04fd7aaa-658d-480d-8465-825f120477bc] Instance network_info: |[{"id": "d9fdd10f-fc7f-4c46-9538-13164c07d369", "address": "fa:16:3e:f9:7e:36", "network": {"id": "dc71bdbf-a78f-4a08-8536-38e560997d01", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-443964394-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d719045ef1a9467c894e5a62510eb701", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78e1ebb0-0130-446b-bf73-a0e59bbb95cc", "external-id": "nsx-vlan-transportzone-414", "segmentation_id": 414, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd9fdd10f-fc", "ovs_interfaceid": "d9fdd10f-fc7f-4c46-9538-13164c07d369", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 653.699899] env[63345]: DEBUG oslo_concurrency.lockutils [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.635s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 653.701295] env[63345]: INFO nova.compute.claims [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Claim successful on node 
domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 653.705359] env[63345]: DEBUG oslo_concurrency.lockutils [req-ea6b4901-1fd4-4b0f-ad0e-b2ba8579ec69 req-f3572b2a-f773-4131-975d-3972f2cff490 service nova] Acquired lock "refresh_cache-04fd7aaa-658d-480d-8465-825f120477bc" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 653.705549] env[63345]: DEBUG nova.network.neutron [req-ea6b4901-1fd4-4b0f-ad0e-b2ba8579ec69 req-f3572b2a-f773-4131-975d-3972f2cff490 service nova] [instance: 04fd7aaa-658d-480d-8465-825f120477bc] Refreshing network info cache for port d9fdd10f-fc7f-4c46-9538-13164c07d369 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 653.706570] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] [instance: 04fd7aaa-658d-480d-8465-825f120477bc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f9:7e:36', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '78e1ebb0-0130-446b-bf73-a0e59bbb95cc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd9fdd10f-fc7f-4c46-9538-13164c07d369', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 653.714011] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Creating folder: Project (d719045ef1a9467c894e5a62510eb701). Parent ref: group-v225918. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 653.717306] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7fcaedd4-ca22-423e-9faf-3253d8a7cead {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.730335] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Created folder: Project (d719045ef1a9467c894e5a62510eb701) in parent group-v225918. [ 653.730897] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Creating folder: Instances. Parent ref: group-v225946. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 653.730897] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-25a73d28-27d2-45ef-af33-28ca695e275e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.744170] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Created folder: Instances in parent group-v225946. [ 653.744170] env[63345]: DEBUG oslo.service.loopingcall [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 653.744170] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 04fd7aaa-658d-480d-8465-825f120477bc] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 653.744628] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d3faf0f2-0e7b-4fd7-9607-c32146d00438 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.776053] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 653.776053] env[63345]: value = "task-1016716" [ 653.776053] env[63345]: _type = "Task" [ 653.776053] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.784100] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016716, 'name': CreateVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.922891] env[63345]: DEBUG oslo_vmware.api [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Task: {'id': task-1016713, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070099} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.923469] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: bc9d2e6a-f77a-4a21-90bc-81949cbfce91] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 653.924470] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b26f559a-6a3d-4661-b205-83a23618ea5f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.950793] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: bc9d2e6a-f77a-4a21-90bc-81949cbfce91] Reconfiguring VM instance instance-0000001c to attach disk [datastore2] bc9d2e6a-f77a-4a21-90bc-81949cbfce91/bc9d2e6a-f77a-4a21-90bc-81949cbfce91.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 653.951426] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0ec1ffb0-ee39-405b-bd03-d3c86d17a832 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.975590] env[63345]: DEBUG oslo_vmware.api [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Waiting for the task: (returnval){ [ 653.975590] env[63345]: value = "task-1016717" [ 653.975590] env[63345]: _type = "Task" [ 653.975590] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.985280] env[63345]: DEBUG oslo_vmware.api [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Task: {'id': task-1016717, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.104988] env[63345]: DEBUG nova.network.neutron [req-ea6b4901-1fd4-4b0f-ad0e-b2ba8579ec69 req-f3572b2a-f773-4131-975d-3972f2cff490 service nova] [instance: 04fd7aaa-658d-480d-8465-825f120477bc] Updated VIF entry in instance network info cache for port d9fdd10f-fc7f-4c46-9538-13164c07d369. {{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 654.105298] env[63345]: DEBUG nova.network.neutron [req-ea6b4901-1fd4-4b0f-ad0e-b2ba8579ec69 req-f3572b2a-f773-4131-975d-3972f2cff490 service nova] [instance: 04fd7aaa-658d-480d-8465-825f120477bc] Updating instance_info_cache with network_info: [{"id": "d9fdd10f-fc7f-4c46-9538-13164c07d369", "address": "fa:16:3e:f9:7e:36", "network": {"id": "dc71bdbf-a78f-4a08-8536-38e560997d01", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-443964394-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d719045ef1a9467c894e5a62510eb701", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78e1ebb0-0130-446b-bf73-a0e59bbb95cc", "external-id": "nsx-vlan-transportzone-414", "segmentation_id": 414, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd9fdd10f-fc", "ovs_interfaceid": "d9fdd10f-fc7f-4c46-9538-13164c07d369", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 654.203222] env[63345]: DEBUG nova.compute.utils [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 654.204374] env[63345]: DEBUG nova.compute.manager [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Allocating IP information in the background. {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 654.204560] env[63345]: DEBUG nova.network.neutron [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 654.284041] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016716, 'name': CreateVM_Task, 'duration_secs': 0.396033} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.284222] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 04fd7aaa-658d-480d-8465-825f120477bc] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 654.284886] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 654.285067] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 654.285406] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 654.285656] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-77edc925-3490-482d-8490-9c5265f4ab39 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.290358] env[63345]: DEBUG oslo_vmware.api [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Waiting for the task: (returnval){ [ 654.290358] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52664403-eabc-a1da-1da9-d56cce0272a8" [ 654.290358] env[63345]: _type = "Task" [ 654.290358] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.301809] env[63345]: DEBUG oslo_vmware.api [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52664403-eabc-a1da-1da9-d56cce0272a8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.326546] env[63345]: DEBUG nova.policy [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '17d05413415247e784585aaa367481eb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '80677040e91647d9afae9c71c48ed3f0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 654.492943] env[63345]: DEBUG oslo_vmware.api [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Task: {'id': task-1016717, 'name': ReconfigVM_Task, 'duration_secs': 0.297594} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.493614] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: bc9d2e6a-f77a-4a21-90bc-81949cbfce91] Reconfigured VM instance instance-0000001c to attach disk [datastore2] bc9d2e6a-f77a-4a21-90bc-81949cbfce91/bc9d2e6a-f77a-4a21-90bc-81949cbfce91.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 654.495153] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7113da9d-c9bc-4705-b345-1fe178947943 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.504662] env[63345]: DEBUG oslo_vmware.api [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Waiting for the task: (returnval){ [ 654.504662] env[63345]: value = "task-1016718" [ 654.504662] env[63345]: _type = "Task" [ 654.504662] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.517487] env[63345]: DEBUG oslo_vmware.api [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Task: {'id': task-1016718, 'name': Rename_Task} progress is 6%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.608110] env[63345]: DEBUG oslo_concurrency.lockutils [req-ea6b4901-1fd4-4b0f-ad0e-b2ba8579ec69 req-f3572b2a-f773-4131-975d-3972f2cff490 service nova] Releasing lock "refresh_cache-04fd7aaa-658d-480d-8465-825f120477bc" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 654.705672] env[63345]: DEBUG nova.compute.manager [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Start building block device mappings for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 654.801459] env[63345]: DEBUG oslo_vmware.api [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52664403-eabc-a1da-1da9-d56cce0272a8, 'name': SearchDatastore_Task, 'duration_secs': 0.008404} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.801992] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 654.802380] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] [instance: 04fd7aaa-658d-480d-8465-825f120477bc] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 654.802772] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 654.803077] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 654.803407] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 654.803780] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7d32b217-4ecd-4ce8-b52a-11d09d80197a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.812173] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 654.812482] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 654.813486] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b4a96ffc-1cde-4506-bfd0-2c516039c4dc {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.821335] env[63345]: DEBUG oslo_vmware.api [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Waiting for the task: (returnval){ [ 654.821335] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52800797-76ef-b1e2-a5d0-52bb5a08a2c5" [ 654.821335] env[63345]: _type = "Task" [ 654.821335] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.827672] env[63345]: DEBUG oslo_vmware.api [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52800797-76ef-b1e2-a5d0-52bb5a08a2c5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.888055] env[63345]: DEBUG nova.network.neutron [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] [instance: 46d3332a-bfb9-4812-8201-a87467ce5151] Successfully updated port: 6fb848af-6632-4cdf-847d-138fe30c4a08 {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 654.966234] env[63345]: DEBUG nova.network.neutron [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Successfully created port: 8c1bd582-6867-4cba-9522-0e03560fa3f7 {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 655.016293] env[63345]: DEBUG oslo_vmware.api [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Task: {'id': task-1016718, 'name': Rename_Task, 'duration_secs': 0.143031} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.018749] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: bc9d2e6a-f77a-4a21-90bc-81949cbfce91] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 655.019193] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d5da2f92-bcca-4d4c-97df-6856f369a582 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.030146] env[63345]: DEBUG oslo_vmware.api [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Waiting for the task: (returnval){ [ 655.030146] env[63345]: value = "task-1016719" [ 655.030146] env[63345]: _type = "Task" [ 655.030146] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.048861] env[63345]: DEBUG oslo_vmware.api [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Task: {'id': task-1016719, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.175872] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-115af6ee-52aa-4b8a-be8f-096148197531 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.184134] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e633b1c-ce80-4021-b585-6f55ece73fad {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.222092] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50d9e377-5f9d-458b-928b-b9fa030fd16c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.233171] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efc50c73-2179-4ad3-946d-f1fa6378cc9d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.248777] env[63345]: DEBUG nova.compute.provider_tree [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 655.328980] env[63345]: DEBUG oslo_vmware.api [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52800797-76ef-b1e2-a5d0-52bb5a08a2c5, 'name': SearchDatastore_Task, 'duration_secs': 0.008651} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.329855] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-47c141a4-9a66-4148-b34b-0e5359af0718 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.335300] env[63345]: DEBUG oslo_vmware.api [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Waiting for the task: (returnval){ [ 655.335300] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52ba586b-cff3-e855-7d11-0175b0c7760f" [ 655.335300] env[63345]: _type = "Task" [ 655.335300] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.343296] env[63345]: DEBUG oslo_vmware.api [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52ba586b-cff3-e855-7d11-0175b0c7760f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.392112] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Acquiring lock "refresh_cache-46d3332a-bfb9-4812-8201-a87467ce5151" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 655.392325] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Acquired lock "refresh_cache-46d3332a-bfb9-4812-8201-a87467ce5151" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 655.392645] env[63345]: DEBUG nova.network.neutron [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] [instance: 46d3332a-bfb9-4812-8201-a87467ce5151] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 655.503985] env[63345]: DEBUG nova.compute.manager [req-b6d995bf-2dc1-49bd-8bb9-8aa21a266398 req-cdcbe7ed-c0a7-4f6b-a959-9222349e8a66 service nova] [instance: 46d3332a-bfb9-4812-8201-a87467ce5151] Received event network-vif-plugged-6fb848af-6632-4cdf-847d-138fe30c4a08 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 655.504466] env[63345]: DEBUG oslo_concurrency.lockutils [req-b6d995bf-2dc1-49bd-8bb9-8aa21a266398 req-cdcbe7ed-c0a7-4f6b-a959-9222349e8a66 service nova] Acquiring lock "46d3332a-bfb9-4812-8201-a87467ce5151-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 655.504982] env[63345]: DEBUG oslo_concurrency.lockutils [req-b6d995bf-2dc1-49bd-8bb9-8aa21a266398 req-cdcbe7ed-c0a7-4f6b-a959-9222349e8a66 service nova] Lock "46d3332a-bfb9-4812-8201-a87467ce5151-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 655.505826] env[63345]: DEBUG oslo_concurrency.lockutils [req-b6d995bf-2dc1-49bd-8bb9-8aa21a266398 req-cdcbe7ed-c0a7-4f6b-a959-9222349e8a66 service nova] Lock "46d3332a-bfb9-4812-8201-a87467ce5151-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 655.506067] env[63345]: DEBUG nova.compute.manager [req-b6d995bf-2dc1-49bd-8bb9-8aa21a266398 req-cdcbe7ed-c0a7-4f6b-a959-9222349e8a66 service nova] [instance: 46d3332a-bfb9-4812-8201-a87467ce5151] No waiting events found dispatching network-vif-plugged-6fb848af-6632-4cdf-847d-138fe30c4a08 {{(pid=63345) 
pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 655.506283] env[63345]: WARNING nova.compute.manager [req-b6d995bf-2dc1-49bd-8bb9-8aa21a266398 req-cdcbe7ed-c0a7-4f6b-a959-9222349e8a66 service nova] [instance: 46d3332a-bfb9-4812-8201-a87467ce5151] Received unexpected event network-vif-plugged-6fb848af-6632-4cdf-847d-138fe30c4a08 for instance with vm_state building and task_state spawning. [ 655.506485] env[63345]: DEBUG nova.compute.manager [req-b6d995bf-2dc1-49bd-8bb9-8aa21a266398 req-cdcbe7ed-c0a7-4f6b-a959-9222349e8a66 service nova] [instance: 46d3332a-bfb9-4812-8201-a87467ce5151] Received event network-changed-6fb848af-6632-4cdf-847d-138fe30c4a08 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 655.506676] env[63345]: DEBUG nova.compute.manager [req-b6d995bf-2dc1-49bd-8bb9-8aa21a266398 req-cdcbe7ed-c0a7-4f6b-a959-9222349e8a66 service nova] [instance: 46d3332a-bfb9-4812-8201-a87467ce5151] Refreshing instance network info cache due to event network-changed-6fb848af-6632-4cdf-847d-138fe30c4a08. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 655.506882] env[63345]: DEBUG oslo_concurrency.lockutils [req-b6d995bf-2dc1-49bd-8bb9-8aa21a266398 req-cdcbe7ed-c0a7-4f6b-a959-9222349e8a66 service nova] Acquiring lock "refresh_cache-46d3332a-bfb9-4812-8201-a87467ce5151" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 655.543798] env[63345]: DEBUG oslo_vmware.api [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Task: {'id': task-1016719, 'name': PowerOnVM_Task, 'duration_secs': 0.453243} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.546102] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: bc9d2e6a-f77a-4a21-90bc-81949cbfce91] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 655.546102] env[63345]: INFO nova.compute.manager [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: bc9d2e6a-f77a-4a21-90bc-81949cbfce91] Took 7.65 seconds to spawn the instance on the hypervisor. [ 655.546102] env[63345]: DEBUG nova.compute.manager [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: bc9d2e6a-f77a-4a21-90bc-81949cbfce91] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 655.546102] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-797b10a5-d2b7-48dc-9d5d-f28afd5abf49 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.726134] env[63345]: DEBUG nova.compute.manager [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Start spawning the instance on the hypervisor. 
{{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 655.750445] env[63345]: DEBUG nova.virt.hardware [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='e0f196a9-2434-4e97-8d5f-115ba2c65179',id=36,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-506807631',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 655.750445] env[63345]: DEBUG nova.virt.hardware [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 655.750630] env[63345]: DEBUG nova.virt.hardware [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 655.750729] env[63345]: DEBUG nova.virt.hardware [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 655.750927] env[63345]: DEBUG nova.virt.hardware [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 655.751025] env[63345]: DEBUG nova.virt.hardware [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 655.751297] env[63345]: DEBUG nova.virt.hardware [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 655.751374] env[63345]: DEBUG nova.virt.hardware [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 655.751538] env[63345]: DEBUG 
nova.virt.hardware [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 655.751694] env[63345]: DEBUG nova.virt.hardware [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 655.751865] env[63345]: DEBUG nova.virt.hardware [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 655.753035] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cf2257f-c4e4-490a-86a3-fdecbe1e5871 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.755916] env[63345]: DEBUG nova.scheduler.client.report [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 655.764637] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f77bb500-77af-4c59-a930-037a32ef10a7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.847249] env[63345]: DEBUG oslo_vmware.api [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52ba586b-cff3-e855-7d11-0175b0c7760f, 'name': SearchDatastore_Task, 'duration_secs': 0.011883} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.847552] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 655.847811] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 04fd7aaa-658d-480d-8465-825f120477bc/04fd7aaa-658d-480d-8465-825f120477bc.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 655.848084] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8f9551d4-fccf-453b-b3f1-073e95ed7dda {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.855097] env[63345]: DEBUG oslo_vmware.api [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Waiting for the task: (returnval){ [ 655.855097] env[63345]: value = "task-1016720" [ 655.855097] env[63345]: _type = "Task" [ 655.855097] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.863704] env[63345]: DEBUG oslo_vmware.api [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Task: {'id': task-1016720, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.952637] env[63345]: DEBUG nova.network.neutron [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] [instance: 46d3332a-bfb9-4812-8201-a87467ce5151] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 655.982366] env[63345]: DEBUG oslo_concurrency.lockutils [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Acquiring lock "3a85df04-3997-48a3-8992-f24fe997b3cc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 655.983355] env[63345]: DEBUG oslo_concurrency.lockutils [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Lock "3a85df04-3997-48a3-8992-f24fe997b3cc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 656.065955] env[63345]: INFO nova.compute.manager [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: bc9d2e6a-f77a-4a21-90bc-81949cbfce91] Took 26.89 seconds to build instance. [ 656.246595] env[63345]: DEBUG nova.network.neutron [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] [instance: 46d3332a-bfb9-4812-8201-a87467ce5151] Updating instance_info_cache with network_info: [{"id": "6fb848af-6632-4cdf-847d-138fe30c4a08", "address": "fa:16:3e:c8:a8:d4", "network": {"id": "0a7828e5-642e-4e9f-af2d-cf4de69f3d8a", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-2048304741-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "621255ab615842da94141f5949873177", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2f7abe5d-b8fe-4983-bd50-e7469f1fe7f3", "external-id": "nsx-vlan-transportzone-263", "segmentation_id": 263, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6fb848af-66", "ovs_interfaceid": "6fb848af-6632-4cdf-847d-138fe30c4a08", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 656.261483] env[63345]: DEBUG oslo_concurrency.lockutils [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.561s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 656.262625] env[63345]: DEBUG nova.compute.manager [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 
070a834d-6478-4705-8df0-2a27c8780507] Start building networks asynchronously for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 656.265339] env[63345]: DEBUG oslo_concurrency.lockutils [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.562s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 656.267391] env[63345]: INFO nova.compute.claims [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] [instance: 4d41f4a7-4fde-4d34-be7c-533c00fe5ae6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 656.365691] env[63345]: DEBUG oslo_vmware.api [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Task: {'id': task-1016720, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.499456} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.365959] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 04fd7aaa-658d-480d-8465-825f120477bc/04fd7aaa-658d-480d-8465-825f120477bc.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 656.366561] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] [instance: 04fd7aaa-658d-480d-8465-825f120477bc] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 656.366812] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f2f5f402-36e3-4c9b-a95c-4bf32047f044 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.372529] env[63345]: DEBUG oslo_vmware.api [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Waiting for the task: (returnval){ [ 656.372529] env[63345]: value = "task-1016721" [ 656.372529] env[63345]: _type = "Task" [ 656.372529] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.380406] env[63345]: DEBUG oslo_vmware.api [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Task: {'id': task-1016721, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.567700] env[63345]: DEBUG oslo_concurrency.lockutils [None req-bef5acc5-8266-4275-a649-08f6e9726b94 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Lock "bc9d2e6a-f77a-4a21-90bc-81949cbfce91" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 101.551s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 656.680027] env[63345]: DEBUG oslo_concurrency.lockutils [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Acquiring lock "b5173471-3367-42ba-b450-62ad8573f048" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 656.680547] env[63345]: DEBUG oslo_concurrency.lockutils [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Lock "b5173471-3367-42ba-b450-62ad8573f048" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 656.750472] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Releasing lock "refresh_cache-46d3332a-bfb9-4812-8201-a87467ce5151" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 656.751592] env[63345]: DEBUG nova.compute.manager [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] [instance: 46d3332a-bfb9-4812-8201-a87467ce5151] Instance network_info: |[{"id": "6fb848af-6632-4cdf-847d-138fe30c4a08", "address": "fa:16:3e:c8:a8:d4", "network": {"id": "0a7828e5-642e-4e9f-af2d-cf4de69f3d8a", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-2048304741-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "621255ab615842da94141f5949873177", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2f7abe5d-b8fe-4983-bd50-e7469f1fe7f3", "external-id": "nsx-vlan-transportzone-263", "segmentation_id": 263, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6fb848af-66", "ovs_interfaceid": "6fb848af-6632-4cdf-847d-138fe30c4a08", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 656.754017] env[63345]: DEBUG oslo_concurrency.lockutils [req-b6d995bf-2dc1-49bd-8bb9-8aa21a266398 
req-cdcbe7ed-c0a7-4f6b-a959-9222349e8a66 service nova] Acquired lock "refresh_cache-46d3332a-bfb9-4812-8201-a87467ce5151" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 656.754017] env[63345]: DEBUG nova.network.neutron [req-b6d995bf-2dc1-49bd-8bb9-8aa21a266398 req-cdcbe7ed-c0a7-4f6b-a959-9222349e8a66 service nova] [instance: 46d3332a-bfb9-4812-8201-a87467ce5151] Refreshing network info cache for port 6fb848af-6632-4cdf-847d-138fe30c4a08 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 656.754017] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] [instance: 46d3332a-bfb9-4812-8201-a87467ce5151] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c8:a8:d4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2f7abe5d-b8fe-4983-bd50-e7469f1fe7f3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6fb848af-6632-4cdf-847d-138fe30c4a08', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 656.762384] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Creating folder: Project (621255ab615842da94141f5949873177). Parent ref: group-v225918. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 656.765590] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c3969d31-458c-4f0b-842f-39e87513e73b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.776017] env[63345]: DEBUG nova.compute.utils [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 656.778793] env[63345]: DEBUG nova.compute.manager [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Allocating IP information in the background. {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 656.779116] env[63345]: DEBUG nova.network.neutron [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 656.781437] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Created folder: Project (621255ab615842da94141f5949873177) in parent group-v225918. [ 656.781860] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Creating folder: Instances. Parent ref: group-v225949. 
{{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 656.782226] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ee2196e9-fdf2-4d76-b694-76550b71be3b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.796027] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Created folder: Instances in parent group-v225949. [ 656.796027] env[63345]: DEBUG oslo.service.loopingcall [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 656.796027] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 46d3332a-bfb9-4812-8201-a87467ce5151] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 656.796027] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7ec73c17-f819-4412-a2e6-2232534501fd {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.816021] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 656.816021] env[63345]: value = "task-1016724" [ 656.816021] env[63345]: _type = "Task" [ 656.816021] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.825176] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016724, 'name': CreateVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.826736] env[63345]: DEBUG nova.policy [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c62ea716472f45249517ae2b1318607c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ac5c2a653dae436c97514507939c4e3c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 656.882223] env[63345]: DEBUG oslo_vmware.api [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Task: {'id': task-1016721, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06585} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.882708] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] [instance: 04fd7aaa-658d-480d-8465-825f120477bc] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 656.883662] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0831b46e-faf6-42d4-9a14-40e7cbc98bba {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.907626] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] [instance: 04fd7aaa-658d-480d-8465-825f120477bc] Reconfiguring VM instance instance-0000001d to attach disk [datastore2] 04fd7aaa-658d-480d-8465-825f120477bc/04fd7aaa-658d-480d-8465-825f120477bc.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 656.908148] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1066ab27-f699-4cea-97c3-f0a529166524 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.931024] env[63345]: DEBUG oslo_vmware.api [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Waiting for the task: (returnval){ [ 656.931024] env[63345]: value = "task-1016725" [ 656.931024] env[63345]: _type = "Task" [ 656.931024] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.940698] env[63345]: DEBUG oslo_vmware.api [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Task: {'id': task-1016725, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.074884] env[63345]: DEBUG nova.compute.manager [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 0d5cb238-2d25-47b1-8ce6-15a20836dbfb] Starting instance... 
{{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 657.123913] env[63345]: DEBUG nova.network.neutron [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Successfully created port: cf06de95-5747-4226-b66c-b9ccca47321d {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 657.218944] env[63345]: DEBUG nova.network.neutron [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Successfully updated port: 8c1bd582-6867-4cba-9522-0e03560fa3f7 {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 657.278836] env[63345]: DEBUG nova.compute.manager [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Start building block device mappings for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 657.336300] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016724, 'name': CreateVM_Task, 'duration_secs': 0.379659} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.336300] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 46d3332a-bfb9-4812-8201-a87467ce5151] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 657.337567] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 657.337567] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 657.337567] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 657.337764] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1866fac4-6965-47c7-8c9e-9a1e3e15c55e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.346388] env[63345]: DEBUG oslo_vmware.api [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Waiting for the task: (returnval){ [ 657.346388] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]528b533b-4e48-e414-a078-71b114959528" [ 
657.346388] env[63345]: _type = "Task" [ 657.346388] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.357173] env[63345]: DEBUG oslo_vmware.api [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]528b533b-4e48-e414-a078-71b114959528, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.376823] env[63345]: DEBUG nova.network.neutron [req-b6d995bf-2dc1-49bd-8bb9-8aa21a266398 req-cdcbe7ed-c0a7-4f6b-a959-9222349e8a66 service nova] [instance: 46d3332a-bfb9-4812-8201-a87467ce5151] Updated VIF entry in instance network info cache for port 6fb848af-6632-4cdf-847d-138fe30c4a08. {{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 657.377320] env[63345]: DEBUG nova.network.neutron [req-b6d995bf-2dc1-49bd-8bb9-8aa21a266398 req-cdcbe7ed-c0a7-4f6b-a959-9222349e8a66 service nova] [instance: 46d3332a-bfb9-4812-8201-a87467ce5151] Updating instance_info_cache with network_info: [{"id": "6fb848af-6632-4cdf-847d-138fe30c4a08", "address": "fa:16:3e:c8:a8:d4", "network": {"id": "0a7828e5-642e-4e9f-af2d-cf4de69f3d8a", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-2048304741-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "621255ab615842da94141f5949873177", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2f7abe5d-b8fe-4983-bd50-e7469f1fe7f3", "external-id": "nsx-vlan-transportzone-263", "segmentation_id": 263, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6fb848af-66", "ovs_interfaceid": "6fb848af-6632-4cdf-847d-138fe30c4a08", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 657.444956] env[63345]: DEBUG oslo_vmware.api [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Task: {'id': task-1016725, 'name': ReconfigVM_Task, 'duration_secs': 0.369736} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.445492] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] [instance: 04fd7aaa-658d-480d-8465-825f120477bc] Reconfigured VM instance instance-0000001d to attach disk [datastore2] 04fd7aaa-658d-480d-8465-825f120477bc/04fd7aaa-658d-480d-8465-825f120477bc.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 657.446242] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f38b8f12-4e46-40db-8c18-116e5d5d1d3b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.458668] env[63345]: DEBUG oslo_vmware.api [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Waiting for the task: (returnval){ [ 657.458668] env[63345]: value = "task-1016726" [ 657.458668] env[63345]: _type = "Task" [ 657.458668] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.466801] env[63345]: DEBUG oslo_vmware.api [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Task: {'id': task-1016726, 'name': Rename_Task} progress is 6%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.600164] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 657.727418] env[63345]: DEBUG oslo_concurrency.lockutils [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Acquiring lock "refresh_cache-27c6dc17-4ded-4fe7-8fba-265eae64fc32" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 657.727418] env[63345]: DEBUG oslo_concurrency.lockutils [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Acquired lock "refresh_cache-27c6dc17-4ded-4fe7-8fba-265eae64fc32" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 657.727418] env[63345]: DEBUG nova.network.neutron [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 657.766508] env[63345]: DEBUG nova.compute.manager [req-a90cef90-eea7-4dca-b297-f25e93f238ae req-aeba1185-cb33-466c-a95e-c83dbc584ff7 service nova] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Received event network-vif-plugged-8c1bd582-6867-4cba-9522-0e03560fa3f7 {{(pid=63345) external_instance_event 
/opt/stack/nova/nova/compute/manager.py:11460}} [ 657.766736] env[63345]: DEBUG oslo_concurrency.lockutils [req-a90cef90-eea7-4dca-b297-f25e93f238ae req-aeba1185-cb33-466c-a95e-c83dbc584ff7 service nova] Acquiring lock "27c6dc17-4ded-4fe7-8fba-265eae64fc32-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 657.766979] env[63345]: DEBUG oslo_concurrency.lockutils [req-a90cef90-eea7-4dca-b297-f25e93f238ae req-aeba1185-cb33-466c-a95e-c83dbc584ff7 service nova] Lock "27c6dc17-4ded-4fe7-8fba-265eae64fc32-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 657.767129] env[63345]: DEBUG oslo_concurrency.lockutils [req-a90cef90-eea7-4dca-b297-f25e93f238ae req-aeba1185-cb33-466c-a95e-c83dbc584ff7 service nova] Lock "27c6dc17-4ded-4fe7-8fba-265eae64fc32-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 657.767297] env[63345]: DEBUG nova.compute.manager [req-a90cef90-eea7-4dca-b297-f25e93f238ae req-aeba1185-cb33-466c-a95e-c83dbc584ff7 service nova] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] No waiting events found dispatching network-vif-plugged-8c1bd582-6867-4cba-9522-0e03560fa3f7 {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 657.767458] env[63345]: WARNING nova.compute.manager [req-a90cef90-eea7-4dca-b297-f25e93f238ae req-aeba1185-cb33-466c-a95e-c83dbc584ff7 service nova] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Received unexpected event network-vif-plugged-8c1bd582-6867-4cba-9522-0e03560fa3f7 for instance with vm_state building and task_state spawning. [ 657.768010] env[63345]: DEBUG nova.compute.manager [req-a90cef90-eea7-4dca-b297-f25e93f238ae req-aeba1185-cb33-466c-a95e-c83dbc584ff7 service nova] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Received event network-changed-8c1bd582-6867-4cba-9522-0e03560fa3f7 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 657.768010] env[63345]: DEBUG nova.compute.manager [req-a90cef90-eea7-4dca-b297-f25e93f238ae req-aeba1185-cb33-466c-a95e-c83dbc584ff7 service nova] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Refreshing instance network info cache due to event network-changed-8c1bd582-6867-4cba-9522-0e03560fa3f7. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 657.768010] env[63345]: DEBUG oslo_concurrency.lockutils [req-a90cef90-eea7-4dca-b297-f25e93f238ae req-aeba1185-cb33-466c-a95e-c83dbc584ff7 service nova] Acquiring lock "refresh_cache-27c6dc17-4ded-4fe7-8fba-265eae64fc32" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 657.781584] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f468872f-6bcd-44f8-a898-32f28675b01b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.793879] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40bc17c8-810c-4cc4-94d9-f30b49328ba1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.826164] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c15d9d3e-7845-4046-a4f2-8f94f1fd5ac3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.835029] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c553e8eb-74ab-485f-a830-40e4df1f9a31 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.849527] env[63345]: DEBUG nova.compute.provider_tree [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 657.862037] env[63345]: DEBUG oslo_vmware.api [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]528b533b-4e48-e414-a078-71b114959528, 'name': SearchDatastore_Task, 'duration_secs': 0.066326} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.862762] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 657.862762] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] [instance: 46d3332a-bfb9-4812-8201-a87467ce5151] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 657.863062] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 657.863126] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 657.863339] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 657.863848] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f201db34-46de-4bec-918d-984d1315ff7f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.874311] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 657.874522] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 657.875355] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e3a68129-249a-4d20-92dd-fc903d5ace05 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.879717] env[63345]: DEBUG oslo_concurrency.lockutils [req-b6d995bf-2dc1-49bd-8bb9-8aa21a266398 req-cdcbe7ed-c0a7-4f6b-a959-9222349e8a66 service nova] Releasing lock "refresh_cache-46d3332a-bfb9-4812-8201-a87467ce5151" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 657.881213] env[63345]: DEBUG oslo_vmware.api [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Waiting for the task: (returnval){ [ 657.881213] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]527785c8-5890-1cc4-911e-cff44b509225" [ 657.881213] env[63345]: _type = "Task" [ 657.881213] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.890271] env[63345]: DEBUG oslo_vmware.api [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]527785c8-5890-1cc4-911e-cff44b509225, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.975018] env[63345]: DEBUG oslo_vmware.api [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Task: {'id': task-1016726, 'name': Rename_Task, 'duration_secs': 0.143216} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.975018] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] [instance: 04fd7aaa-658d-480d-8465-825f120477bc] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 657.975018] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4558ac2d-c900-4149-848a-d0461a44a25d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.980057] env[63345]: DEBUG oslo_vmware.api [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Waiting for the task: (returnval){ [ 657.980057] env[63345]: value = "task-1016727" [ 657.980057] env[63345]: _type = "Task" [ 657.980057] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.988884] env[63345]: DEBUG oslo_vmware.api [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Task: {'id': task-1016727, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.264320] env[63345]: DEBUG nova.compute.manager [req-8e2f0dc2-cc1f-4982-8ff7-c969c6ddc624 req-07bb6765-7dc2-4001-bd7f-67d4d8c28740 service nova] [instance: bc9d2e6a-f77a-4a21-90bc-81949cbfce91] Received event network-changed-35f00929-4dc1-4515-b0de-19a6377c68ca {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 658.264520] env[63345]: DEBUG nova.compute.manager [req-8e2f0dc2-cc1f-4982-8ff7-c969c6ddc624 req-07bb6765-7dc2-4001-bd7f-67d4d8c28740 service nova] [instance: bc9d2e6a-f77a-4a21-90bc-81949cbfce91] Refreshing instance network info cache due to event network-changed-35f00929-4dc1-4515-b0de-19a6377c68ca. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 658.264883] env[63345]: DEBUG oslo_concurrency.lockutils [req-8e2f0dc2-cc1f-4982-8ff7-c969c6ddc624 req-07bb6765-7dc2-4001-bd7f-67d4d8c28740 service nova] Acquiring lock "refresh_cache-bc9d2e6a-f77a-4a21-90bc-81949cbfce91" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 658.264883] env[63345]: DEBUG oslo_concurrency.lockutils [req-8e2f0dc2-cc1f-4982-8ff7-c969c6ddc624 req-07bb6765-7dc2-4001-bd7f-67d4d8c28740 service nova] Acquired lock "refresh_cache-bc9d2e6a-f77a-4a21-90bc-81949cbfce91" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 658.265033] env[63345]: DEBUG nova.network.neutron [req-8e2f0dc2-cc1f-4982-8ff7-c969c6ddc624 req-07bb6765-7dc2-4001-bd7f-67d4d8c28740 service nova] [instance: bc9d2e6a-f77a-4a21-90bc-81949cbfce91] Refreshing network info cache for port 35f00929-4dc1-4515-b0de-19a6377c68ca {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 658.267478] env[63345]: DEBUG nova.network.neutron [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 658.298414] env[63345]: DEBUG nova.compute.manager [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Start spawning the instance on the hypervisor. 
{{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 658.325040] env[63345]: DEBUG nova.virt.hardware [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 658.325254] env[63345]: DEBUG nova.virt.hardware [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 658.325325] env[63345]: DEBUG nova.virt.hardware [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 658.325494] env[63345]: DEBUG nova.virt.hardware [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 658.325642] env[63345]: DEBUG nova.virt.hardware [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 658.325790] env[63345]: DEBUG nova.virt.hardware [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 658.325997] env[63345]: DEBUG nova.virt.hardware [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 658.326179] env[63345]: DEBUG nova.virt.hardware [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 658.326348] env[63345]: DEBUG nova.virt.hardware [None 
req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 658.326537] env[63345]: DEBUG nova.virt.hardware [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 658.326681] env[63345]: DEBUG nova.virt.hardware [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 658.327548] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f80f15e9-bc06-4cb1-a352-7b93a2afe4dc {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.335390] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a250e92f-be73-4d1d-bc41-87d12705f88e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.355193] env[63345]: DEBUG nova.scheduler.client.report [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 658.391900] env[63345]: DEBUG oslo_vmware.api [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]527785c8-5890-1cc4-911e-cff44b509225, 'name': SearchDatastore_Task, 'duration_secs': 0.042307} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.392719] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bb1076d9-9a74-484d-85ef-63ff96304670 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.398335] env[63345]: DEBUG oslo_vmware.api [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Waiting for the task: (returnval){ [ 658.398335] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]521e09fa-bd13-1378-d42d-8fb1a25051ed" [ 658.398335] env[63345]: _type = "Task" [ 658.398335] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 658.407227] env[63345]: DEBUG oslo_vmware.api [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]521e09fa-bd13-1378-d42d-8fb1a25051ed, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.479648] env[63345]: DEBUG nova.network.neutron [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Updating instance_info_cache with network_info: [{"id": "8c1bd582-6867-4cba-9522-0e03560fa3f7", "address": "fa:16:3e:3d:4f:aa", "network": {"id": "18285fd9-d154-415c-acbb-1494303e3b6c", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "5dc99cc64e6c4d83928b309253a8df8d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8b99a46-3e7f-4ef1-9e45-58e6cd17f210", "external-id": "nsx-vlan-transportzone-704", "segmentation_id": 704, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c1bd582-68", "ovs_interfaceid": "8c1bd582-6867-4cba-9522-0e03560fa3f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 658.490309] env[63345]: DEBUG oslo_vmware.api [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Task: {'id': task-1016727, 'name': PowerOnVM_Task, 'duration_secs': 0.467674} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.490577] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] [instance: 04fd7aaa-658d-480d-8465-825f120477bc] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 658.490778] env[63345]: INFO nova.compute.manager [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] [instance: 04fd7aaa-658d-480d-8465-825f120477bc] Took 8.03 seconds to spawn the instance on the hypervisor. 
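[editor's note] The recurring triplets above — "Waiting for the task: (returnval){ value = "task-10167xx" _type = "Task" } to complete" from oslo_vmware/api.py:397 (wait_for_task), "progress is N%" from api.py:434 (_poll_task), and "completed successfully" with a duration_secs from api.py:444 — all come from oslo.vmware's task-polling loop: the driver submits a vCenter task (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, CreateVM_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) and then blocks, periodically re-reading the task state until it finishes. The snippet below is a minimal, self-contained sketch of that pattern only; FakeTask, its poll() method, and this wait_for_task signature are illustrative stand-ins and are not the real oslo.vmware API, which does the equivalent via SOAP property reads and a looping call.

```python
import logging
import time

logging.basicConfig(level=logging.DEBUG, format="%(levelname)s %(name)s %(message)s")
LOG = logging.getLogger("task-poll-sketch")


class FakeTask:
    """Stand-in for a vCenter task handle; yields a few progress updates, then success."""

    def __init__(self, task_id, name, steps=3):
        self.id = task_id
        self.name = name
        self._updates = [("running", pct) for pct in range(0, 100, 100 // steps)]
        self._updates.append(("success", 100))

    def poll(self):
        """Return (state, progress); the real API would read the task's info property here."""
        state, progress = self._updates[0]
        if len(self._updates) > 1:
            self._updates.pop(0)
        return state, progress


def wait_for_task(task, interval=0.5):
    """Poll `task` until it reports success or error, logging progress as it goes."""
    start = time.monotonic()
    while True:
        state, progress = task.poll()
        if state == "running":
            # Mirrors the "Task: {...} progress is N%" DEBUG lines in the log above.
            LOG.debug("Task: {'id': %s, 'name': %s} progress is %d%%.",
                      task.id, task.name, progress)
            time.sleep(interval)
            continue
        duration = time.monotonic() - start
        if state == "success":
            # Mirrors the "completed successfully" line that carries duration_secs.
            LOG.debug("Task: {'id': %s, 'name': %s, 'duration_secs': %.6f} "
                      "completed successfully.", task.id, task.name, duration)
            return
        raise RuntimeError(f"Task {task.id} ({task.name}) ended in state {state}")


if __name__ == "__main__":
    # Example: the shape of the PowerOnVM_Task polling seen for instance 04fd7aaa-...
    wait_for_task(FakeTask("task-1016727", "PowerOnVM_Task"))
```

Under these assumptions, running the sketch prints a progress line per poll and a final completion line with the measured duration, which is the same three-stage trace (wait, poll, complete) visible for each task id in the log entries above.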
[ 658.490958] env[63345]: DEBUG nova.compute.manager [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] [instance: 04fd7aaa-658d-480d-8465-825f120477bc] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 658.491755] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db790082-2c7f-43ab-b1e2-1efe222ab4a2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.850549] env[63345]: DEBUG nova.network.neutron [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Successfully updated port: cf06de95-5747-4226-b66c-b9ccca47321d {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 658.863533] env[63345]: DEBUG oslo_concurrency.lockutils [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.598s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 658.868134] env[63345]: DEBUG nova.compute.manager [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] [instance: 4d41f4a7-4fde-4d34-be7c-533c00fe5ae6] Start building networks asynchronously for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 658.872034] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 15.511s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 658.914785] env[63345]: DEBUG oslo_vmware.api [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]521e09fa-bd13-1378-d42d-8fb1a25051ed, 'name': SearchDatastore_Task, 'duration_secs': 0.0245} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.915698] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 658.915969] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 46d3332a-bfb9-4812-8201-a87467ce5151/46d3332a-bfb9-4812-8201-a87467ce5151.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 658.916265] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-08ce2c51-03a5-4921-bd96-470a352c1b8d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.924430] env[63345]: DEBUG oslo_vmware.api [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Waiting for the task: (returnval){ [ 658.924430] env[63345]: value = "task-1016728" [ 658.924430] env[63345]: _type = "Task" [ 658.924430] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 658.936148] env[63345]: DEBUG oslo_vmware.api [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Task: {'id': task-1016728, 'name': CopyVirtualDisk_Task} progress is 0%. 
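The CopyVirtualDisk_Task above copies the cached image vmdk (devstack-image-cache_base/<image-id>/<image-id>.vmdk) into the new instance's directory on the same datastore, while the surrounding lockutils lines show the cached path being held until the copy is handed off. A minimal sketch of building those paths and performing the copy, with a local file copy standing in for the remote disk-copy task:

```python
import os
import shutil


def copy_root_disk_from_cache(cache_dir, image_id, instance_dir, instance_uuid):
    """Copy the cached base vmdk into the new instance's directory.

    Mirrors the copy above: <cache_dir>/<image-id>/<image-id>.vmdk is
    duplicated as <instance_dir>/<instance-uuid>.vmdk. shutil stands in
    for the datastore-side CopyVirtualDisk task.
    """
    source = os.path.join(cache_dir, image_id, image_id + '.vmdk')
    target = os.path.join(instance_dir, instance_uuid + '.vmdk')
    os.makedirs(instance_dir, exist_ok=True)
    shutil.copyfile(source, target)
    return target
```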
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.985439] env[63345]: DEBUG oslo_concurrency.lockutils [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Releasing lock "refresh_cache-27c6dc17-4ded-4fe7-8fba-265eae64fc32" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 658.985761] env[63345]: DEBUG nova.compute.manager [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Instance network_info: |[{"id": "8c1bd582-6867-4cba-9522-0e03560fa3f7", "address": "fa:16:3e:3d:4f:aa", "network": {"id": "18285fd9-d154-415c-acbb-1494303e3b6c", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "5dc99cc64e6c4d83928b309253a8df8d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8b99a46-3e7f-4ef1-9e45-58e6cd17f210", "external-id": "nsx-vlan-transportzone-704", "segmentation_id": 704, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c1bd582-68", "ovs_interfaceid": "8c1bd582-6867-4cba-9522-0e03560fa3f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 658.986067] env[63345]: DEBUG oslo_concurrency.lockutils [req-a90cef90-eea7-4dca-b297-f25e93f238ae req-aeba1185-cb33-466c-a95e-c83dbc584ff7 service nova] Acquired lock "refresh_cache-27c6dc17-4ded-4fe7-8fba-265eae64fc32" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 658.986251] env[63345]: DEBUG nova.network.neutron [req-a90cef90-eea7-4dca-b297-f25e93f238ae req-aeba1185-cb33-466c-a95e-c83dbc584ff7 service nova] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Refreshing network info cache for port 8c1bd582-6867-4cba-9522-0e03560fa3f7 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 658.987453] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3d:4f:aa', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a8b99a46-3e7f-4ef1-9e45-58e6cd17f210', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8c1bd582-6867-4cba-9522-0e03560fa3f7', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 658.995056] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Creating folder: Project 
(80677040e91647d9afae9c71c48ed3f0). Parent ref: group-v225918. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 658.995835] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-48034524-4a5b-4a58-baca-2290c8314b59 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.007315] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Created folder: Project (80677040e91647d9afae9c71c48ed3f0) in parent group-v225918. [ 659.007501] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Creating folder: Instances. Parent ref: group-v225952. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 659.009327] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a499de47-71de-4354-b8f9-abba540e54fd {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.011275] env[63345]: INFO nova.compute.manager [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] [instance: 04fd7aaa-658d-480d-8465-825f120477bc] Took 26.64 seconds to build instance. [ 659.019044] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Created folder: Instances in parent group-v225952. [ 659.019279] env[63345]: DEBUG oslo.service.loopingcall [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 659.019461] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 659.019678] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-33b079bd-8138-4e2d-8610-a8a3a44c36f0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.043716] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 659.043716] env[63345]: value = "task-1016731" [ 659.043716] env[63345]: _type = "Task" [ 659.043716] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.077493] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016731, 'name': CreateVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.172961] env[63345]: DEBUG nova.network.neutron [req-8e2f0dc2-cc1f-4982-8ff7-c969c6ddc624 req-07bb6765-7dc2-4001-bd7f-67d4d8c28740 service nova] [instance: bc9d2e6a-f77a-4a21-90bc-81949cbfce91] Updated VIF entry in instance network info cache for port 35f00929-4dc1-4515-b0de-19a6377c68ca. 
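The Folder.CreateFolder calls above first create a per-project folder named "Project (<project-id>)" under the parent group and then an "Instances" folder inside it. Because several spawns for the same project can race, the create has to tolerate the folder already existing. A minimal sketch of an idempotent create, using an in-memory tree in place of the vCenter inventory (Folder, DuplicateName and ensure_folder are illustrative names, not the driver's API):

```python
class DuplicateName(Exception):
    """Stand-in for the fault raised when a folder name already exists."""


class Folder:
    """Minimal in-memory stand-in for a folder node in the inventory."""
    def __init__(self, name):
        self.name = name
        self.children = {}


def create_folder(parent, name):
    """Create *name* under *parent*; fail if it already exists."""
    if name in parent.children:
        raise DuplicateName(name)
    child = Folder(name)
    parent.children[name] = child
    return child


def ensure_folder(parent, name):
    """Idempotent create: a concurrent creator winning the race is fine."""
    try:
        return create_folder(parent, name)
    except DuplicateName:
        return parent.children[name]


def ensure_instance_folder(root, project_id):
    """Build the 'Project (<id>)' / 'Instances' hierarchy seen above."""
    project = ensure_folder(root, 'Project (%s)' % project_id)
    return ensure_folder(project, 'Instances')


root = Folder('group-v225918')    # parent group referenced in the log
instances = ensure_instance_folder(root, '80677040e91647d9afae9c71c48ed3f0')
```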
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 659.172961] env[63345]: DEBUG nova.network.neutron [req-8e2f0dc2-cc1f-4982-8ff7-c969c6ddc624 req-07bb6765-7dc2-4001-bd7f-67d4d8c28740 service nova] [instance: bc9d2e6a-f77a-4a21-90bc-81949cbfce91] Updating instance_info_cache with network_info: [{"id": "35f00929-4dc1-4515-b0de-19a6377c68ca", "address": "fa:16:3e:9e:c6:f2", "network": {"id": "5159b9e8-dfb2-472c-bec6-f963867f9baf", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-134143484-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.243", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be4b8982dd144c969cb530f52ed9297b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31ac3fea-ebf4-4bed-bf70-1eaecdf71280", "external-id": "nsx-vlan-transportzone-489", "segmentation_id": 489, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap35f00929-4d", "ovs_interfaceid": "35f00929-4dc1-4515-b0de-19a6377c68ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 659.355371] env[63345]: DEBUG oslo_concurrency.lockutils [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Acquiring lock "refresh_cache-070a834d-6478-4705-8df0-2a27c8780507" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 659.355371] env[63345]: DEBUG oslo_concurrency.lockutils [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Acquired lock "refresh_cache-070a834d-6478-4705-8df0-2a27c8780507" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 659.355936] env[63345]: DEBUG nova.network.neutron [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 659.374873] env[63345]: DEBUG nova.compute.utils [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 659.381653] env[63345]: DEBUG nova.compute.manager [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] [instance: 4d41f4a7-4fde-4d34-be7c-533c00fe5ae6] Allocating IP information in the background. 
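The update_instance_cache_with_nw_info entries above persist the full serialized VIF list (port id, MAC, subnets, OVS details) per instance, so later operations can read network details without another round trip to the network API. A minimal sketch of that cache write/read, with an in-memory dict standing in for the instance info cache store:

```python
import json

# In-memory stand-in for the per-instance network info cache.
_NW_INFO_CACHE = {}


def update_instance_cache_with_nw_info(instance_uuid, network_info):
    """Store the serialized network_info blob for an instance.

    network_info is the list-of-VIF structure seen in the log
    (id, address, network, devname, ovs_interfaceid, ...).
    """
    _NW_INFO_CACHE[instance_uuid] = json.dumps(network_info)


def get_cached_nw_info(instance_uuid):
    """Return the cached VIF list without querying the network service."""
    raw = _NW_INFO_CACHE.get(instance_uuid)
    return json.loads(raw) if raw else []


# Example: cache a single OVS port like the ones above (fields trimmed).
update_instance_cache_with_nw_info(
    '27c6dc17-4ded-4fe7-8fba-265eae64fc32',
    [{'id': '8c1bd582-6867-4cba-9522-0e03560fa3f7',
      'address': 'fa:16:3e:3d:4f:aa',
      'type': 'ovs',
      'devname': 'tap8c1bd582-68'}])
```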
{{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 659.382472] env[63345]: DEBUG nova.network.neutron [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] [instance: 4d41f4a7-4fde-4d34-be7c-533c00fe5ae6] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 659.438297] env[63345]: DEBUG nova.policy [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ec8b21b49963404b97d88bcdb756df1e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5e485a32dc3245458cc463dc819d05e7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 659.447300] env[63345]: DEBUG oslo_vmware.api [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Task: {'id': task-1016728, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.514634] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5830808b-fa95-4e6c-9365-f8a60a27abdb tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Lock "04fd7aaa-658d-480d-8465-825f120477bc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 103.242s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 659.559208] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016731, 'name': CreateVM_Task} progress is 25%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.675331] env[63345]: DEBUG oslo_concurrency.lockutils [req-8e2f0dc2-cc1f-4982-8ff7-c969c6ddc624 req-07bb6765-7dc2-4001-bd7f-67d4d8c28740 service nova] Releasing lock "refresh_cache-bc9d2e6a-f77a-4a21-90bc-81949cbfce91" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 659.735624] env[63345]: DEBUG nova.network.neutron [req-a90cef90-eea7-4dca-b297-f25e93f238ae req-aeba1185-cb33-466c-a95e-c83dbc584ff7 service nova] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Updated VIF entry in instance network info cache for port 8c1bd582-6867-4cba-9522-0e03560fa3f7. 
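The nova.policy line above shows a member-scoped credential failing the network:attach_external_network rule, which simply means the port is created on a tenant network rather than an external one. A toy sketch of that kind of role-based rule check (a real deployment evaluates rule expressions from policy files via an enforcer; the required-role set here is illustrative):

```python
def authorize(credentials, rule, required_roles):
    """Return True if the credentials carry a role the rule requires."""
    return bool(set(credentials.get('roles', [])) & set(required_roles))


creds = {'roles': ['member', 'reader'],
         'project_id': '5e485a32dc3245458cc463dc819d05e7'}

# Attaching to an external network is typically admin-only, so a member
# credential fails the check, matching the "Policy check ... failed" line.
allowed = authorize(creds, 'network:attach_external_network', {'admin'})
print(allowed)   # False
```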
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 659.736016] env[63345]: DEBUG nova.network.neutron [req-a90cef90-eea7-4dca-b297-f25e93f238ae req-aeba1185-cb33-466c-a95e-c83dbc584ff7 service nova] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Updating instance_info_cache with network_info: [{"id": "8c1bd582-6867-4cba-9522-0e03560fa3f7", "address": "fa:16:3e:3d:4f:aa", "network": {"id": "18285fd9-d154-415c-acbb-1494303e3b6c", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "5dc99cc64e6c4d83928b309253a8df8d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8b99a46-3e7f-4ef1-9e45-58e6cd17f210", "external-id": "nsx-vlan-transportzone-704", "segmentation_id": 704, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c1bd582-68", "ovs_interfaceid": "8c1bd582-6867-4cba-9522-0e03560fa3f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 659.817998] env[63345]: DEBUG nova.network.neutron [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] [instance: 4d41f4a7-4fde-4d34-be7c-533c00fe5ae6] Successfully created port: b7ded7af-2b2d-4b3a-8d80-03678b1e05cb {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 659.882583] env[63345]: DEBUG nova.compute.manager [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] [instance: 4d41f4a7-4fde-4d34-be7c-533c00fe5ae6] Start building block device mappings for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 659.889659] env[63345]: DEBUG nova.network.neutron [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 659.907100] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8633b772-0c84-491e-a09e-0565abc3f526 tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Acquiring lock "interface-04fd7aaa-658d-480d-8465-825f120477bc-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 659.907415] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8633b772-0c84-491e-a09e-0565abc3f526 tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Lock "interface-04fd7aaa-658d-480d-8465-825f120477bc-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 659.907771] env[63345]: DEBUG nova.objects.instance [None req-8633b772-0c84-491e-a09e-0565abc3f526 tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Lazy-loading 'flavor' on Instance uuid 04fd7aaa-658d-480d-8465-825f120477bc {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 659.918070] env[63345]: WARNING nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance ee31689b-bf0b-4737-86c7-5451c763e603 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 659.918229] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 28caa5f5-141a-4ef9-abb3-33a1973d99cf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 659.918356] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance b4a7d6dd-98dc-49d8-b344-1878cd5a3f51 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 659.918469] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance bc9d2e6a-f77a-4a21-90bc-81949cbfce91 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 659.918588] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 04fd7aaa-658d-480d-8465-825f120477bc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 659.918701] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 46d3332a-bfb9-4812-8201-a87467ce5151 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 659.918816] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 27c6dc17-4ded-4fe7-8fba-265eae64fc32 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 659.918926] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 070a834d-6478-4705-8df0-2a27c8780507 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 659.919047] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 4d41f4a7-4fde-4d34-be7c-533c00fe5ae6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 659.938032] env[63345]: DEBUG oslo_vmware.api [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Task: {'id': task-1016728, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.695092} completed successfully. 
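The resource_tracker run above walks every allocation that placement holds against this compute node and classifies it: instances tracked locally keep their allocations, instances that are only scheduled are skipped, and allocations for instances the host no longer manages trigger the WARNING rather than an automatic cleanup. A minimal sketch of that reconciliation, with plain dicts and sets standing in for the placement report client:

```python
def audit_allocations(tracked_instances, scheduled_instances, allocations):
    """Classify placement allocations against locally tracked instances.

    tracked_instances / scheduled_instances are sets of instance UUIDs;
    allocations maps instance UUID -> resource dict, e.g.
    {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}.
    Returns (kept, skipped, orphaned) lists of UUIDs.
    """
    kept, skipped, orphaned = [], [], []
    for uuid in allocations:
        if uuid in tracked_instances:
            kept.append(uuid)       # actively managed: allocation is valid
        elif uuid in scheduled_instances:
            skipped.append(uuid)    # scheduled but not yet started: leave it
        else:
            orphaned.append(uuid)   # not managed here: warn, do not guess
    return kept, skipped, orphaned
```

Only warning about orphaned allocations, instead of deleting them, avoids destroying allocations that belong to an in-flight operation the tracker cannot see.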
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 659.940654] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 46d3332a-bfb9-4812-8201-a87467ce5151/46d3332a-bfb9-4812-8201-a87467ce5151.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 659.940905] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] [instance: 46d3332a-bfb9-4812-8201-a87467ce5151] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 659.941384] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e336e99a-edd0-4733-94f0-42d9a6b4f906 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.949678] env[63345]: DEBUG oslo_vmware.api [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Waiting for the task: (returnval){ [ 659.949678] env[63345]: value = "task-1016732" [ 659.949678] env[63345]: _type = "Task" [ 659.949678] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.957248] env[63345]: DEBUG oslo_vmware.api [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Task: {'id': task-1016732, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.017585] env[63345]: DEBUG nova.compute.manager [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] [instance: c07c7f5d-a674-458f-8253-1bc2d61be6c1] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 660.055736] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016731, 'name': CreateVM_Task, 'duration_secs': 0.865559} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 660.055903] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 660.056573] env[63345]: DEBUG oslo_concurrency.lockutils [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 660.056770] env[63345]: DEBUG oslo_concurrency.lockutils [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 660.057095] env[63345]: DEBUG oslo_concurrency.lockutils [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 660.057966] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-81e08c41-33e5-41ce-a572-67cf3b64cceb {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.063165] env[63345]: DEBUG oslo_vmware.api [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Waiting for the task: (returnval){ [ 660.063165] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52939bff-2bd2-987a-2a0d-35736af053f0" [ 660.063165] env[63345]: _type = "Task" [ 660.063165] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.071156] env[63345]: DEBUG oslo_vmware.api [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52939bff-2bd2-987a-2a0d-35736af053f0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.074176] env[63345]: DEBUG nova.network.neutron [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Updating instance_info_cache with network_info: [{"id": "cf06de95-5747-4226-b66c-b9ccca47321d", "address": "fa:16:3e:9e:46:44", "network": {"id": "403ac06e-e45e-4215-bf0c-16ddd583ddc5", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1349318740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ac5c2a653dae436c97514507939c4e3c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e41c97-4d75-4041-ae71-321e7e9d480b", "external-id": "nsx-vlan-transportzone-483", "segmentation_id": 483, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf06de95-57", "ovs_interfaceid": "cf06de95-5747-4226-b66c-b9ccca47321d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 660.239073] env[63345]: DEBUG oslo_concurrency.lockutils [req-a90cef90-eea7-4dca-b297-f25e93f238ae req-aeba1185-cb33-466c-a95e-c83dbc584ff7 service nova] Releasing lock "refresh_cache-27c6dc17-4ded-4fe7-8fba-265eae64fc32" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 660.411527] env[63345]: DEBUG nova.objects.instance [None req-8633b772-0c84-491e-a09e-0565abc3f526 tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Lazy-loading 'pci_requests' on Instance uuid 04fd7aaa-658d-480d-8465-825f120477bc {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 660.422226] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 805f9143-a8d8-4995-a20d-3b10ef3ab599 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 660.459556] env[63345]: DEBUG oslo_vmware.api [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Task: {'id': task-1016732, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.481728} completed successfully. 
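The ExtendVirtualDisk_Task above grows the copied root vmdk to 1048576 KiB, i.e. the flavor's 1 GiB root disk, because the cached image is smaller than the flavor's root_gb. A minimal sketch of that decision, with a hypothetical extend_virtual_disk() callable standing in for the extend task:

```python
GIB_IN_KB = 1024 * 1024   # disk sizes in the log are expressed in KiB


def maybe_extend_root_disk(image_size_kb, flavor_root_gb, extend_virtual_disk):
    """Grow the root disk to the flavor size if the image is smaller.

    extend_virtual_disk(new_size_kb) is a hypothetical callable standing
    in for the ExtendVirtualDisk task seen above.
    """
    requested_kb = flavor_root_gb * GIB_IN_KB
    if image_size_kb < requested_kb:
        extend_virtual_disk(requested_kb)
        return requested_kb
    return image_size_kb


# Example: a ~21 MB cirros image with an m1.nano flavor (root_gb=1)
# extends to 1048576 KiB, matching the value in the log.
print(maybe_extend_root_disk(20819, 1, lambda kb: None))
```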
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 660.459929] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] [instance: 46d3332a-bfb9-4812-8201-a87467ce5151] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 660.460712] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51b60a63-3f79-4b2b-8c4d-8039eb935c58 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.482587] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] [instance: 46d3332a-bfb9-4812-8201-a87467ce5151] Reconfiguring VM instance instance-0000001e to attach disk [datastore2] 46d3332a-bfb9-4812-8201-a87467ce5151/46d3332a-bfb9-4812-8201-a87467ce5151.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 660.482843] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-38dc3197-697f-4338-9197-935946417a2b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.501143] env[63345]: DEBUG oslo_vmware.api [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Waiting for the task: (returnval){ [ 660.501143] env[63345]: value = "task-1016733" [ 660.501143] env[63345]: _type = "Task" [ 660.501143] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.509108] env[63345]: DEBUG oslo_vmware.api [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Task: {'id': task-1016733, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.538621] env[63345]: DEBUG oslo_concurrency.lockutils [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 660.573189] env[63345]: DEBUG oslo_vmware.api [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52939bff-2bd2-987a-2a0d-35736af053f0, 'name': SearchDatastore_Task, 'duration_secs': 0.08521} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 660.573777] env[63345]: DEBUG oslo_concurrency.lockutils [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 660.574108] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 660.574249] env[63345]: DEBUG oslo_concurrency.lockutils [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 660.574396] env[63345]: DEBUG oslo_concurrency.lockutils [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 660.574569] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 660.574811] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3b04ff9b-c264-4b6f-bca9-780bf6ab6899 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.576686] env[63345]: DEBUG oslo_concurrency.lockutils [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Releasing lock "refresh_cache-070a834d-6478-4705-8df0-2a27c8780507" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 660.576957] env[63345]: DEBUG nova.compute.manager [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Instance network_info: |[{"id": "cf06de95-5747-4226-b66c-b9ccca47321d", "address": "fa:16:3e:9e:46:44", "network": {"id": "403ac06e-e45e-4215-bf0c-16ddd583ddc5", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1349318740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": 
"192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ac5c2a653dae436c97514507939c4e3c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e41c97-4d75-4041-ae71-321e7e9d480b", "external-id": "nsx-vlan-transportzone-483", "segmentation_id": 483, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf06de95-57", "ovs_interfaceid": "cf06de95-5747-4226-b66c-b9ccca47321d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 660.577322] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9e:46:44', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69e41c97-4d75-4041-ae71-321e7e9d480b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cf06de95-5747-4226-b66c-b9ccca47321d', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 660.584631] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Creating folder: Project (ac5c2a653dae436c97514507939c4e3c). Parent ref: group-v225918. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 660.585191] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-eca778a7-2b21-48f0-af60-3eb5d383f8ee {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.593899] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 660.594145] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 660.594843] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-97ad735d-5ec3-4bbb-b110-822f2387b603 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.600015] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Created folder: Project (ac5c2a653dae436c97514507939c4e3c) in parent group-v225918. [ 660.600193] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Creating folder: Instances. Parent ref: group-v225955. 
{{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 660.600704] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-729cb4b3-6c84-4310-b527-b87db9e6789b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.603135] env[63345]: DEBUG oslo_vmware.api [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Waiting for the task: (returnval){ [ 660.603135] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52ec83f8-c4ac-796c-7c9b-d0cae7462acc" [ 660.603135] env[63345]: _type = "Task" [ 660.603135] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.610990] env[63345]: DEBUG oslo_vmware.api [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52ec83f8-c4ac-796c-7c9b-d0cae7462acc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.614531] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Created folder: Instances in parent group-v225955. [ 660.614531] env[63345]: DEBUG oslo.service.loopingcall [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 660.614667] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 660.614793] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6e1338fa-14b6-484d-81d8-43e6981e08f3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.637079] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 660.637079] env[63345]: value = "task-1016736" [ 660.637079] env[63345]: _type = "Task" [ 660.637079] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.644964] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016736, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.763487] env[63345]: DEBUG nova.compute.manager [req-4906e9fe-9193-489d-ae77-08a4cb90a305 req-027273b3-2c02-4a6d-9a74-ae41b5fbc5fa service nova] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Received event network-vif-plugged-cf06de95-5747-4226-b66c-b9ccca47321d {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 660.763700] env[63345]: DEBUG oslo_concurrency.lockutils [req-4906e9fe-9193-489d-ae77-08a4cb90a305 req-027273b3-2c02-4a6d-9a74-ae41b5fbc5fa service nova] Acquiring lock "070a834d-6478-4705-8df0-2a27c8780507-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 660.763905] env[63345]: DEBUG oslo_concurrency.lockutils [req-4906e9fe-9193-489d-ae77-08a4cb90a305 req-027273b3-2c02-4a6d-9a74-ae41b5fbc5fa service nova] Lock "070a834d-6478-4705-8df0-2a27c8780507-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 660.764093] env[63345]: DEBUG oslo_concurrency.lockutils [req-4906e9fe-9193-489d-ae77-08a4cb90a305 req-027273b3-2c02-4a6d-9a74-ae41b5fbc5fa service nova] Lock "070a834d-6478-4705-8df0-2a27c8780507-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 660.764264] env[63345]: DEBUG nova.compute.manager [req-4906e9fe-9193-489d-ae77-08a4cb90a305 req-027273b3-2c02-4a6d-9a74-ae41b5fbc5fa service nova] [instance: 070a834d-6478-4705-8df0-2a27c8780507] No waiting events found dispatching network-vif-plugged-cf06de95-5747-4226-b66c-b9ccca47321d {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 660.764426] env[63345]: WARNING nova.compute.manager [req-4906e9fe-9193-489d-ae77-08a4cb90a305 req-027273b3-2c02-4a6d-9a74-ae41b5fbc5fa service nova] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Received unexpected event network-vif-plugged-cf06de95-5747-4226-b66c-b9ccca47321d for instance with vm_state building and task_state spawning. [ 660.764583] env[63345]: DEBUG nova.compute.manager [req-4906e9fe-9193-489d-ae77-08a4cb90a305 req-027273b3-2c02-4a6d-9a74-ae41b5fbc5fa service nova] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Received event network-changed-cf06de95-5747-4226-b66c-b9ccca47321d {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 660.764734] env[63345]: DEBUG nova.compute.manager [req-4906e9fe-9193-489d-ae77-08a4cb90a305 req-027273b3-2c02-4a6d-9a74-ae41b5fbc5fa service nova] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Refreshing instance network info cache due to event network-changed-cf06de95-5747-4226-b66c-b9ccca47321d. 
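The network-vif-plugged / network-changed entries above are externally delivered instance events: the compute service looks for a waiter registered for that (instance, event) pair and, finding none ("No waiting events found"), logs the unexpected-event warning and simply refreshes the network cache. A minimal sketch of such an event registry built on threading primitives (class and method names are illustrative, not the compute manager's API):

```python
import threading


class InstanceEvents:
    """Map (instance_uuid, event_name) -> threading.Event for waiters."""

    def __init__(self):
        self._lock = threading.Lock()
        self._events = {}

    def prepare(self, instance_uuid, event_name):
        """Register interest before the operation that expects the event."""
        ev = threading.Event()
        with self._lock:
            self._events[(instance_uuid, event_name)] = ev
        return ev

    def pop_and_signal(self, instance_uuid, event_name):
        """Deliver an external event; return False if nobody was waiting."""
        with self._lock:
            ev = self._events.pop((instance_uuid, event_name), None)
        if ev is None:
            return False        # "No waiting events found" -> unexpected event
        ev.set()
        return True


events = InstanceEvents()
handled = events.pop_and_signal(
    '070a834d-6478-4705-8df0-2a27c8780507',
    'network-vif-plugged-cf06de95-5747-4226-b66c-b9ccca47321d')
print(handled)   # False here, matching the WARNING above
```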
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 660.765312] env[63345]: DEBUG oslo_concurrency.lockutils [req-4906e9fe-9193-489d-ae77-08a4cb90a305 req-027273b3-2c02-4a6d-9a74-ae41b5fbc5fa service nova] Acquiring lock "refresh_cache-070a834d-6478-4705-8df0-2a27c8780507" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 660.765312] env[63345]: DEBUG oslo_concurrency.lockutils [req-4906e9fe-9193-489d-ae77-08a4cb90a305 req-027273b3-2c02-4a6d-9a74-ae41b5fbc5fa service nova] Acquired lock "refresh_cache-070a834d-6478-4705-8df0-2a27c8780507" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 660.765450] env[63345]: DEBUG nova.network.neutron [req-4906e9fe-9193-489d-ae77-08a4cb90a305 req-027273b3-2c02-4a6d-9a74-ae41b5fbc5fa service nova] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Refreshing network info cache for port cf06de95-5747-4226-b66c-b9ccca47321d {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 660.899268] env[63345]: DEBUG nova.compute.manager [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] [instance: 4d41f4a7-4fde-4d34-be7c-533c00fe5ae6] Start spawning the instance on the hypervisor. {{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 660.915116] env[63345]: DEBUG nova.objects.base [None req-8633b772-0c84-491e-a09e-0565abc3f526 tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Object Instance<04fd7aaa-658d-480d-8465-825f120477bc> lazy-loaded attributes: flavor,pci_requests {{(pid=63345) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 660.915458] env[63345]: DEBUG nova.network.neutron [None req-8633b772-0c84-491e-a09e-0565abc3f526 tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] [instance: 04fd7aaa-658d-480d-8465-825f120477bc] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 660.925567] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 3e4e58bd-903b-4b3d-8be4-5678aab6c721 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 660.929517] env[63345]: DEBUG nova.virt.hardware [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 660.929668] env[63345]: DEBUG nova.virt.hardware [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 660.929784] env[63345]: DEBUG nova.virt.hardware [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 660.929973] env[63345]: DEBUG nova.virt.hardware [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 660.930137] env[63345]: DEBUG nova.virt.hardware [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 660.930295] env[63345]: DEBUG nova.virt.hardware [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 660.930493] env[63345]: DEBUG nova.virt.hardware [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 660.930679] env[63345]: DEBUG nova.virt.hardware [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 660.930890] env[63345]: DEBUG nova.virt.hardware [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 660.931087] env[63345]: DEBUG nova.virt.hardware [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 660.931258] env[63345]: DEBUG nova.virt.hardware [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 660.932122] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d510a20c-d7c6-430d-8a7a-2d139263b9c2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.943368] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8913b9f1-55d2-4734-8ad0-a7b33d897695 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.011758] env[63345]: DEBUG oslo_vmware.api [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Task: {'id': task-1016733, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.012740] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8633b772-0c84-491e-a09e-0565abc3f526 tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Lock "interface-04fd7aaa-658d-480d-8465-825f120477bc-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.105s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 661.116767] env[63345]: DEBUG oslo_vmware.api [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52ec83f8-c4ac-796c-7c9b-d0cae7462acc, 'name': SearchDatastore_Task, 'duration_secs': 0.01854} completed successfully. 
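The hardware.py lines above take 1 vCPU, treat the unset flavor/image limits as the 65536 defaults, enumerate the possible (sockets, cores, threads) combinations, and end up with the single 1:1:1 topology. A simplified sketch of that enumeration and ordering (the real code also honors preferred topologies and NUMA constraints, which are omitted here):

```python
from collections import namedtuple

VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')


def possible_cpu_topologies(vcpus, max_sockets, max_cores, max_threads):
    """All (sockets, cores, threads) whose product equals the vCPU count."""
    topologies = []
    for sockets in range(1, min(max_sockets, vcpus) + 1):
        if vcpus % sockets:
            continue
        per_socket = vcpus // sockets
        for cores in range(1, min(max_cores, per_socket) + 1):
            if per_socket % cores:
                continue
            threads = per_socket // cores
            if threads <= max_threads:
                topologies.append(VirtCPUTopology(sockets, cores, threads))
    # Prefer more sockets, then cores, then threads (simplified ordering).
    topologies.sort(key=lambda t: (-t.sockets, -t.cores, -t.threads))
    return topologies


print(possible_cpu_topologies(1, 65536, 65536, 65536))
# [VirtCPUTopology(sockets=1, cores=1, threads=1)]
```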
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.117546] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-698a738a-bbec-4f30-93f7-ac07db72dd33 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.127705] env[63345]: DEBUG oslo_vmware.api [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Waiting for the task: (returnval){ [ 661.127705] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52fca44f-59b8-5ac0-815a-8a4d6d26182e" [ 661.127705] env[63345]: _type = "Task" [ 661.127705] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.134489] env[63345]: DEBUG oslo_vmware.api [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52fca44f-59b8-5ac0-815a-8a4d6d26182e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.147127] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016736, 'name': CreateVM_Task} progress is 25%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.428476] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance e6bc8cb9-2f1a-49cb-974d-ea9a211126ee has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 661.487023] env[63345]: DEBUG nova.network.neutron [req-4906e9fe-9193-489d-ae77-08a4cb90a305 req-027273b3-2c02-4a6d-9a74-ae41b5fbc5fa service nova] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Updated VIF entry in instance network info cache for port cf06de95-5747-4226-b66c-b9ccca47321d. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 661.487399] env[63345]: DEBUG nova.network.neutron [req-4906e9fe-9193-489d-ae77-08a4cb90a305 req-027273b3-2c02-4a6d-9a74-ae41b5fbc5fa service nova] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Updating instance_info_cache with network_info: [{"id": "cf06de95-5747-4226-b66c-b9ccca47321d", "address": "fa:16:3e:9e:46:44", "network": {"id": "403ac06e-e45e-4215-bf0c-16ddd583ddc5", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1349318740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ac5c2a653dae436c97514507939c4e3c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e41c97-4d75-4041-ae71-321e7e9d480b", "external-id": "nsx-vlan-transportzone-483", "segmentation_id": 483, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf06de95-57", "ovs_interfaceid": "cf06de95-5747-4226-b66c-b9ccca47321d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 661.514098] env[63345]: DEBUG oslo_vmware.api [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Task: {'id': task-1016733, 'name': ReconfigVM_Task, 'duration_secs': 0.813855} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.514417] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] [instance: 46d3332a-bfb9-4812-8201-a87467ce5151] Reconfigured VM instance instance-0000001e to attach disk [datastore2] 46d3332a-bfb9-4812-8201-a87467ce5151/46d3332a-bfb9-4812-8201-a87467ce5151.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 661.516171] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2348039d-bc75-4a60-b474-681126128523 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.523252] env[63345]: DEBUG oslo_vmware.api [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Waiting for the task: (returnval){ [ 661.523252] env[63345]: value = "task-1016737" [ 661.523252] env[63345]: _type = "Task" [ 661.523252] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.531956] env[63345]: DEBUG oslo_vmware.api [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Task: {'id': task-1016737, 'name': Rename_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.565143] env[63345]: DEBUG nova.network.neutron [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] [instance: 4d41f4a7-4fde-4d34-be7c-533c00fe5ae6] Successfully updated port: b7ded7af-2b2d-4b3a-8d80-03678b1e05cb {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 661.635103] env[63345]: DEBUG oslo_vmware.api [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52fca44f-59b8-5ac0-815a-8a4d6d26182e, 'name': SearchDatastore_Task, 'duration_secs': 0.010065} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.635711] env[63345]: DEBUG oslo_concurrency.lockutils [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 661.635711] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 27c6dc17-4ded-4fe7-8fba-265eae64fc32/27c6dc17-4ded-4fe7-8fba-265eae64fc32.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 661.635927] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9877a4db-900c-49f7-9f21-3abc6808e370 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.647905] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016736, 'name': CreateVM_Task, 'duration_secs': 0.697064} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.648590] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 661.649354] env[63345]: DEBUG oslo_vmware.api [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Waiting for the task: (returnval){ [ 661.649354] env[63345]: value = "task-1016738" [ 661.649354] env[63345]: _type = "Task" [ 661.649354] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.649938] env[63345]: DEBUG oslo_concurrency.lockutils [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 661.650107] env[63345]: DEBUG oslo_concurrency.lockutils [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 661.652047] env[63345]: DEBUG oslo_concurrency.lockutils [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 661.652047] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-38c4c961-73e9-4fdd-98f2-07d381e4bded {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.661030] env[63345]: DEBUG oslo_vmware.api [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Waiting for the task: (returnval){ [ 661.661030] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]528ffcd8-fdda-c427-e743-c9e301893648" [ 661.661030] env[63345]: _type = "Task" [ 661.661030] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.662878] env[63345]: DEBUG oslo_vmware.api [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Task: {'id': task-1016738, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.670999] env[63345]: DEBUG oslo_vmware.api [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]528ffcd8-fdda-c427-e743-c9e301893648, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.931844] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 7bef089c-e93b-4ba6-a683-4e076489f92a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 661.962539] env[63345]: DEBUG nova.compute.manager [req-fa607a22-db30-471b-9a6a-707b1f5fdf5a req-fa20cb97-0b37-4e6d-9215-5a6e6f9eec3b service nova] [instance: 4d41f4a7-4fde-4d34-be7c-533c00fe5ae6] Received event network-vif-plugged-b7ded7af-2b2d-4b3a-8d80-03678b1e05cb {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 661.962734] env[63345]: DEBUG oslo_concurrency.lockutils [req-fa607a22-db30-471b-9a6a-707b1f5fdf5a req-fa20cb97-0b37-4e6d-9215-5a6e6f9eec3b service nova] Acquiring lock "4d41f4a7-4fde-4d34-be7c-533c00fe5ae6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 661.963046] env[63345]: DEBUG oslo_concurrency.lockutils [req-fa607a22-db30-471b-9a6a-707b1f5fdf5a req-fa20cb97-0b37-4e6d-9215-5a6e6f9eec3b service nova] Lock "4d41f4a7-4fde-4d34-be7c-533c00fe5ae6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 661.963222] env[63345]: DEBUG oslo_concurrency.lockutils [req-fa607a22-db30-471b-9a6a-707b1f5fdf5a req-fa20cb97-0b37-4e6d-9215-5a6e6f9eec3b service nova] Lock "4d41f4a7-4fde-4d34-be7c-533c00fe5ae6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 661.963387] env[63345]: DEBUG nova.compute.manager [req-fa607a22-db30-471b-9a6a-707b1f5fdf5a req-fa20cb97-0b37-4e6d-9215-5a6e6f9eec3b service nova] [instance: 4d41f4a7-4fde-4d34-be7c-533c00fe5ae6] No waiting events found dispatching network-vif-plugged-b7ded7af-2b2d-4b3a-8d80-03678b1e05cb {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 661.963546] env[63345]: WARNING nova.compute.manager [req-fa607a22-db30-471b-9a6a-707b1f5fdf5a req-fa20cb97-0b37-4e6d-9215-5a6e6f9eec3b service nova] [instance: 4d41f4a7-4fde-4d34-be7c-533c00fe5ae6] Received unexpected event network-vif-plugged-b7ded7af-2b2d-4b3a-8d80-03678b1e05cb for instance with vm_state building and task_state spawning. [ 661.989725] env[63345]: DEBUG oslo_concurrency.lockutils [req-4906e9fe-9193-489d-ae77-08a4cb90a305 req-027273b3-2c02-4a6d-9a74-ae41b5fbc5fa service nova] Releasing lock "refresh_cache-070a834d-6478-4705-8df0-2a27c8780507" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 662.033098] env[63345]: DEBUG oslo_vmware.api [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Task: {'id': task-1016737, 'name': Rename_Task, 'duration_secs': 0.231634} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.033371] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] [instance: 46d3332a-bfb9-4812-8201-a87467ce5151] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 662.033599] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f025e85c-812e-46e3-ab51-919bc28e8ad1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.039784] env[63345]: DEBUG oslo_vmware.api [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Waiting for the task: (returnval){ [ 662.039784] env[63345]: value = "task-1016739" [ 662.039784] env[63345]: _type = "Task" [ 662.039784] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.047287] env[63345]: DEBUG oslo_vmware.api [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Task: {'id': task-1016739, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.068218] env[63345]: DEBUG oslo_concurrency.lockutils [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Acquiring lock "refresh_cache-4d41f4a7-4fde-4d34-be7c-533c00fe5ae6" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 662.068367] env[63345]: DEBUG oslo_concurrency.lockutils [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Acquired lock "refresh_cache-4d41f4a7-4fde-4d34-be7c-533c00fe5ae6" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 662.068514] env[63345]: DEBUG nova.network.neutron [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] [instance: 4d41f4a7-4fde-4d34-be7c-533c00fe5ae6] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 662.160868] env[63345]: DEBUG oslo_vmware.api [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Task: {'id': task-1016738, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.174741] env[63345]: DEBUG oslo_vmware.api [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]528ffcd8-fdda-c427-e743-c9e301893648, 'name': SearchDatastore_Task, 'duration_secs': 0.027544} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.176196] env[63345]: DEBUG oslo_concurrency.lockutils [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 662.176196] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 662.176196] env[63345]: DEBUG oslo_concurrency.lockutils [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 662.176196] env[63345]: DEBUG oslo_concurrency.lockutils [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 662.176351] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 662.176459] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a5d3ac17-9ade-43e0-80bd-cfdfd9a828b0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.196294] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 662.196509] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 662.197326] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4bfa0f73-c53a-4219-a38e-07ebeb253933 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.203175] env[63345]: DEBUG oslo_vmware.api [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Waiting for the task: (returnval){ [ 662.203175] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52433489-42e8-fe35-08df-215d1a990bd6" [ 662.203175] env[63345]: _type = "Task" [ 662.203175] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.213523] env[63345]: DEBUG oslo_vmware.api [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52433489-42e8-fe35-08df-215d1a990bd6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.437057] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 0d5cb238-2d25-47b1-8ce6-15a20836dbfb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 662.549749] env[63345]: DEBUG oslo_vmware.api [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Task: {'id': task-1016739, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.599519] env[63345]: DEBUG nova.network.neutron [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] [instance: 4d41f4a7-4fde-4d34-be7c-533c00fe5ae6] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 662.661592] env[63345]: DEBUG oslo_vmware.api [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Task: {'id': task-1016738, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.882842} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.661867] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 27c6dc17-4ded-4fe7-8fba-265eae64fc32/27c6dc17-4ded-4fe7-8fba-265eae64fc32.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 662.662096] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 662.662343] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c4607614-2be3-4dc0-8d09-b0b51282196e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.668753] env[63345]: DEBUG oslo_vmware.api [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Waiting for the task: (returnval){ [ 662.668753] env[63345]: value = "task-1016740" [ 662.668753] env[63345]: _type = "Task" [ 662.668753] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.681548] env[63345]: DEBUG oslo_vmware.api [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Task: {'id': task-1016740, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.719163] env[63345]: DEBUG oslo_vmware.api [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52433489-42e8-fe35-08df-215d1a990bd6, 'name': SearchDatastore_Task, 'duration_secs': 0.061005} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.719338] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e4404225-4ec1-406f-84c6-a1cd8111024c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.724983] env[63345]: DEBUG oslo_vmware.api [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Waiting for the task: (returnval){ [ 662.724983] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]520812f3-423b-3d85-20fd-be8983c82ba0" [ 662.724983] env[63345]: _type = "Task" [ 662.724983] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.737459] env[63345]: DEBUG oslo_vmware.api [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]520812f3-423b-3d85-20fd-be8983c82ba0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.831810] env[63345]: DEBUG nova.network.neutron [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] [instance: 4d41f4a7-4fde-4d34-be7c-533c00fe5ae6] Updating instance_info_cache with network_info: [{"id": "b7ded7af-2b2d-4b3a-8d80-03678b1e05cb", "address": "fa:16:3e:f1:56:0e", "network": {"id": "da04e776-e1c6-4aba-a5fe-45cc0e02c8a7", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-76137959-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5e485a32dc3245458cc463dc819d05e7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7c80243e-93a7-4a95-bc8d-e9534bacd66e", "external-id": "nsx-vlan-transportzone-306", "segmentation_id": 306, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb7ded7af-2b", "ovs_interfaceid": "b7ded7af-2b2d-4b3a-8d80-03678b1e05cb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 662.939944] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance c07c7f5d-a674-458f-8253-1bc2d61be6c1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 663.054590] env[63345]: DEBUG oslo_vmware.api [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Task: {'id': task-1016739, 'name': PowerOnVM_Task, 'duration_secs': 0.745829} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.054590] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] [instance: 46d3332a-bfb9-4812-8201-a87467ce5151] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 663.054590] env[63345]: INFO nova.compute.manager [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] [instance: 46d3332a-bfb9-4812-8201-a87467ce5151] Took 9.97 seconds to spawn the instance on the hypervisor. [ 663.054590] env[63345]: DEBUG nova.compute.manager [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] [instance: 46d3332a-bfb9-4812-8201-a87467ce5151] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 663.054590] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-781209bb-a451-486c-8b77-aca93d1ef581 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.180100] env[63345]: DEBUG oslo_vmware.api [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Task: {'id': task-1016740, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068409} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.180381] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 663.181312] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5a2267f-753d-4d04-8a6e-0c125e135828 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.205935] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Reconfiguring VM instance instance-0000001f to attach disk [datastore2] 27c6dc17-4ded-4fe7-8fba-265eae64fc32/27c6dc17-4ded-4fe7-8fba-265eae64fc32.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 663.206250] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fcb02e1a-d81c-481d-bd90-dbc5a6273c97 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.231146] env[63345]: DEBUG oslo_vmware.api [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Waiting for the task: (returnval){ [ 663.231146] env[63345]: value = "task-1016741" [ 663.231146] env[63345]: _type = "Task" [ 663.231146] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.240128] env[63345]: DEBUG oslo_vmware.api [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]520812f3-423b-3d85-20fd-be8983c82ba0, 'name': SearchDatastore_Task, 'duration_secs': 0.039116} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.240831] env[63345]: DEBUG oslo_concurrency.lockutils [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 663.241122] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 070a834d-6478-4705-8df0-2a27c8780507/070a834d-6478-4705-8df0-2a27c8780507.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 663.241400] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0ca4f3b6-aea7-42d5-a026-be9ea32f8b2e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.246929] env[63345]: DEBUG oslo_vmware.api [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Task: {'id': task-1016741, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.250900] env[63345]: DEBUG oslo_vmware.api [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Waiting for the task: (returnval){ [ 663.250900] env[63345]: value = "task-1016742" [ 663.250900] env[63345]: _type = "Task" [ 663.250900] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.258155] env[63345]: DEBUG oslo_vmware.api [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1016742, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.260644] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ef38e3d5-dbac-4904-a1a3-8bb19f5d01da tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Acquiring lock "04fd7aaa-658d-480d-8465-825f120477bc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 663.260877] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ef38e3d5-dbac-4904-a1a3-8bb19f5d01da tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Lock "04fd7aaa-658d-480d-8465-825f120477bc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 663.261106] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ef38e3d5-dbac-4904-a1a3-8bb19f5d01da tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Acquiring lock "04fd7aaa-658d-480d-8465-825f120477bc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 663.261294] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ef38e3d5-dbac-4904-a1a3-8bb19f5d01da tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Lock "04fd7aaa-658d-480d-8465-825f120477bc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 663.261461] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ef38e3d5-dbac-4904-a1a3-8bb19f5d01da tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Lock "04fd7aaa-658d-480d-8465-825f120477bc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 663.263549] env[63345]: INFO nova.compute.manager [None req-ef38e3d5-dbac-4904-a1a3-8bb19f5d01da tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] [instance: 04fd7aaa-658d-480d-8465-825f120477bc] Terminating instance [ 663.334257] env[63345]: DEBUG oslo_concurrency.lockutils [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Releasing lock "refresh_cache-4d41f4a7-4fde-4d34-be7c-533c00fe5ae6" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 663.334677] env[63345]: DEBUG nova.compute.manager [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] [instance: 4d41f4a7-4fde-4d34-be7c-533c00fe5ae6] Instance network_info: |[{"id": "b7ded7af-2b2d-4b3a-8d80-03678b1e05cb", "address": "fa:16:3e:f1:56:0e", "network": {"id": "da04e776-e1c6-4aba-a5fe-45cc0e02c8a7", "bridge": "br-int", "label": 
"tempest-InstanceActionsV221TestJSON-76137959-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5e485a32dc3245458cc463dc819d05e7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7c80243e-93a7-4a95-bc8d-e9534bacd66e", "external-id": "nsx-vlan-transportzone-306", "segmentation_id": 306, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb7ded7af-2b", "ovs_interfaceid": "b7ded7af-2b2d-4b3a-8d80-03678b1e05cb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 663.335161] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] [instance: 4d41f4a7-4fde-4d34-be7c-533c00fe5ae6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f1:56:0e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7c80243e-93a7-4a95-bc8d-e9534bacd66e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b7ded7af-2b2d-4b3a-8d80-03678b1e05cb', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 663.343041] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Creating folder: Project (5e485a32dc3245458cc463dc819d05e7). Parent ref: group-v225918. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 663.343309] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1636ecce-7cb2-4058-97df-c183e9a8953a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.353674] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Created folder: Project (5e485a32dc3245458cc463dc819d05e7) in parent group-v225918. [ 663.353819] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Creating folder: Instances. Parent ref: group-v225958. 
{{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 663.354064] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-964321ef-f4cc-46e9-b44d-eab5d8fde660 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.362707] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Created folder: Instances in parent group-v225958. [ 663.363533] env[63345]: DEBUG oslo.service.loopingcall [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 663.363533] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4d41f4a7-4fde-4d34-be7c-533c00fe5ae6] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 663.363533] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0c93939d-bb3f-42a8-9d73-64720d0bfa39 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.383590] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 663.383590] env[63345]: value = "task-1016745" [ 663.383590] env[63345]: _type = "Task" [ 663.383590] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.392089] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016745, 'name': CreateVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.442774] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance f043239f-7158-4199-a784-d711a5a301be has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 663.570857] env[63345]: INFO nova.compute.manager [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] [instance: 46d3332a-bfb9-4812-8201-a87467ce5151] Took 28.18 seconds to build instance. [ 663.743128] env[63345]: DEBUG oslo_vmware.api [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Task: {'id': task-1016741, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.761882] env[63345]: DEBUG oslo_vmware.api [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1016742, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.767877] env[63345]: DEBUG nova.compute.manager [None req-ef38e3d5-dbac-4904-a1a3-8bb19f5d01da tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] [instance: 04fd7aaa-658d-480d-8465-825f120477bc] Start destroying the instance on the hypervisor. {{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 663.768127] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ef38e3d5-dbac-4904-a1a3-8bb19f5d01da tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] [instance: 04fd7aaa-658d-480d-8465-825f120477bc] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 663.769113] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4cdeba1-f035-4ab9-bd3e-2ff46d7af20f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.777266] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef38e3d5-dbac-4904-a1a3-8bb19f5d01da tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] [instance: 04fd7aaa-658d-480d-8465-825f120477bc] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 663.777553] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4a4147a1-4d08-4d82-9da5-f8c8256bb009 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.785152] env[63345]: DEBUG oslo_vmware.api [None req-ef38e3d5-dbac-4904-a1a3-8bb19f5d01da tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Waiting for the task: (returnval){ [ 663.785152] env[63345]: value = "task-1016746" [ 663.785152] env[63345]: _type = "Task" [ 663.785152] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.796337] env[63345]: DEBUG oslo_vmware.api [None req-ef38e3d5-dbac-4904-a1a3-8bb19f5d01da tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Task: {'id': task-1016746, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.896407] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016745, 'name': CreateVM_Task, 'duration_secs': 0.454978} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.900018] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4d41f4a7-4fde-4d34-be7c-533c00fe5ae6] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 663.900018] env[63345]: DEBUG oslo_concurrency.lockutils [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 663.900018] env[63345]: DEBUG oslo_concurrency.lockutils [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 663.900018] env[63345]: DEBUG oslo_concurrency.lockutils [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 663.900018] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0a7fa1d2-1b7c-47b8-a646-a3cd5d42f641 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.905340] env[63345]: DEBUG oslo_vmware.api [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Waiting for the task: (returnval){ [ 663.905340] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52e48c7b-5873-9e34-9a6f-f2309d52db7a" [ 663.905340] env[63345]: _type = "Task" [ 663.905340] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.915794] env[63345]: DEBUG oslo_vmware.api [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52e48c7b-5873-9e34-9a6f-f2309d52db7a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.945885] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 30755716-03a7-41bd-90c2-7ef21baf9975 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 664.073701] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ceb0a188-a15b-46f6-bc12-d7823e64f236 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Lock "46d3332a-bfb9-4812-8201-a87467ce5151" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 106.739s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 664.228034] env[63345]: DEBUG nova.compute.manager [req-14342ba2-ff4c-40a3-8947-6ffe8de94c39 req-4e807550-4049-4b7d-ab20-e9f9822c9279 service nova] [instance: 4d41f4a7-4fde-4d34-be7c-533c00fe5ae6] Received event network-changed-b7ded7af-2b2d-4b3a-8d80-03678b1e05cb {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 664.228265] env[63345]: DEBUG nova.compute.manager [req-14342ba2-ff4c-40a3-8947-6ffe8de94c39 req-4e807550-4049-4b7d-ab20-e9f9822c9279 service nova] [instance: 4d41f4a7-4fde-4d34-be7c-533c00fe5ae6] Refreshing instance network info cache due to event network-changed-b7ded7af-2b2d-4b3a-8d80-03678b1e05cb. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 664.228491] env[63345]: DEBUG oslo_concurrency.lockutils [req-14342ba2-ff4c-40a3-8947-6ffe8de94c39 req-4e807550-4049-4b7d-ab20-e9f9822c9279 service nova] Acquiring lock "refresh_cache-4d41f4a7-4fde-4d34-be7c-533c00fe5ae6" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 664.228644] env[63345]: DEBUG oslo_concurrency.lockutils [req-14342ba2-ff4c-40a3-8947-6ffe8de94c39 req-4e807550-4049-4b7d-ab20-e9f9822c9279 service nova] Acquired lock "refresh_cache-4d41f4a7-4fde-4d34-be7c-533c00fe5ae6" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 664.228813] env[63345]: DEBUG nova.network.neutron [req-14342ba2-ff4c-40a3-8947-6ffe8de94c39 req-4e807550-4049-4b7d-ab20-e9f9822c9279 service nova] [instance: 4d41f4a7-4fde-4d34-be7c-533c00fe5ae6] Refreshing network info cache for port b7ded7af-2b2d-4b3a-8d80-03678b1e05cb {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 664.244100] env[63345]: DEBUG oslo_vmware.api [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Task: {'id': task-1016741, 'name': ReconfigVM_Task, 'duration_secs': 0.761828} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.244410] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Reconfigured VM instance instance-0000001f to attach disk [datastore2] 27c6dc17-4ded-4fe7-8fba-265eae64fc32/27c6dc17-4ded-4fe7-8fba-265eae64fc32.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 664.244989] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-824112fb-1a3c-434e-a305-96d7de9d7ec7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.255906] env[63345]: DEBUG oslo_vmware.api [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Waiting for the task: (returnval){ [ 664.255906] env[63345]: value = "task-1016747" [ 664.255906] env[63345]: _type = "Task" [ 664.255906] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.263891] env[63345]: DEBUG oslo_vmware.api [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1016742, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.667786} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.267241] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 070a834d-6478-4705-8df0-2a27c8780507/070a834d-6478-4705-8df0-2a27c8780507.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 664.267486] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 664.267762] env[63345]: DEBUG oslo_vmware.api [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Task: {'id': task-1016747, 'name': Rename_Task} progress is 6%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.268027] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cda5f0bd-e110-4dd7-9551-37f8f2c644a8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.274411] env[63345]: DEBUG oslo_vmware.api [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Waiting for the task: (returnval){ [ 664.274411] env[63345]: value = "task-1016748" [ 664.274411] env[63345]: _type = "Task" [ 664.274411] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.282086] env[63345]: DEBUG oslo_vmware.api [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1016748, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.293556] env[63345]: DEBUG oslo_vmware.api [None req-ef38e3d5-dbac-4904-a1a3-8bb19f5d01da tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Task: {'id': task-1016746, 'name': PowerOffVM_Task, 'duration_secs': 0.339849} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.293660] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef38e3d5-dbac-4904-a1a3-8bb19f5d01da tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] [instance: 04fd7aaa-658d-480d-8465-825f120477bc] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 664.295329] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ef38e3d5-dbac-4904-a1a3-8bb19f5d01da tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] [instance: 04fd7aaa-658d-480d-8465-825f120477bc] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 664.295329] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-87371f7c-478b-47aa-b54f-d7a92df66c9a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.386020] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ef38e3d5-dbac-4904-a1a3-8bb19f5d01da tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] [instance: 04fd7aaa-658d-480d-8465-825f120477bc] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 664.386020] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ef38e3d5-dbac-4904-a1a3-8bb19f5d01da tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] [instance: 04fd7aaa-658d-480d-8465-825f120477bc] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 664.386020] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef38e3d5-dbac-4904-a1a3-8bb19f5d01da tempest-AttachInterfacesV270Test-966357238 
tempest-AttachInterfacesV270Test-966357238-project-member] Deleting the datastore file [datastore2] 04fd7aaa-658d-480d-8465-825f120477bc {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 664.386020] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-55764911-9cdd-401f-a4d6-6c6eb7b1c7b3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.392963] env[63345]: DEBUG oslo_vmware.api [None req-ef38e3d5-dbac-4904-a1a3-8bb19f5d01da tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Waiting for the task: (returnval){ [ 664.392963] env[63345]: value = "task-1016750" [ 664.392963] env[63345]: _type = "Task" [ 664.392963] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.401195] env[63345]: DEBUG oslo_vmware.api [None req-ef38e3d5-dbac-4904-a1a3-8bb19f5d01da tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Task: {'id': task-1016750, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.414831] env[63345]: DEBUG oslo_vmware.api [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52e48c7b-5873-9e34-9a6f-f2309d52db7a, 'name': SearchDatastore_Task, 'duration_secs': 0.009617} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.415181] env[63345]: DEBUG oslo_concurrency.lockutils [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 664.415449] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] [instance: 4d41f4a7-4fde-4d34-be7c-533c00fe5ae6] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 664.415791] env[63345]: DEBUG oslo_concurrency.lockutils [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 664.416067] env[63345]: DEBUG oslo_concurrency.lockutils [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 664.416271] 
env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 664.416523] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-97857693-5653-420f-9d5e-6a42392c2ae9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.424530] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 664.424530] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 664.424847] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7cf76eee-35e9-4ffe-b05c-14f9326453ce {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.431974] env[63345]: DEBUG oslo_vmware.api [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Waiting for the task: (returnval){ [ 664.431974] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52481025-793e-d4e3-3783-8a4f6957528f" [ 664.431974] env[63345]: _type = "Task" [ 664.431974] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.439598] env[63345]: DEBUG oslo_vmware.api [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52481025-793e-d4e3-3783-8a4f6957528f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.453443] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 869f8110-6490-4a47-955a-0ce085f826af has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 664.577502] env[63345]: DEBUG nova.compute.manager [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] [instance: f043239f-7158-4199-a784-d711a5a301be] Starting instance... 
{{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 664.753124] env[63345]: DEBUG nova.compute.manager [req-99c35b3e-ad85-449f-af51-4ab8c4144338 req-bc5aafca-c8f0-4c04-984a-b7172cb2ae99 service nova] [instance: 46d3332a-bfb9-4812-8201-a87467ce5151] Received event network-changed-6fb848af-6632-4cdf-847d-138fe30c4a08 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 664.753350] env[63345]: DEBUG nova.compute.manager [req-99c35b3e-ad85-449f-af51-4ab8c4144338 req-bc5aafca-c8f0-4c04-984a-b7172cb2ae99 service nova] [instance: 46d3332a-bfb9-4812-8201-a87467ce5151] Refreshing instance network info cache due to event network-changed-6fb848af-6632-4cdf-847d-138fe30c4a08. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 664.753514] env[63345]: DEBUG oslo_concurrency.lockutils [req-99c35b3e-ad85-449f-af51-4ab8c4144338 req-bc5aafca-c8f0-4c04-984a-b7172cb2ae99 service nova] Acquiring lock "refresh_cache-46d3332a-bfb9-4812-8201-a87467ce5151" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 664.753678] env[63345]: DEBUG oslo_concurrency.lockutils [req-99c35b3e-ad85-449f-af51-4ab8c4144338 req-bc5aafca-c8f0-4c04-984a-b7172cb2ae99 service nova] Acquired lock "refresh_cache-46d3332a-bfb9-4812-8201-a87467ce5151" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 664.753810] env[63345]: DEBUG nova.network.neutron [req-99c35b3e-ad85-449f-af51-4ab8c4144338 req-bc5aafca-c8f0-4c04-984a-b7172cb2ae99 service nova] [instance: 46d3332a-bfb9-4812-8201-a87467ce5151] Refreshing network info cache for port 6fb848af-6632-4cdf-847d-138fe30c4a08 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 664.766826] env[63345]: DEBUG oslo_vmware.api [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Task: {'id': task-1016747, 'name': Rename_Task, 'duration_secs': 0.137544} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.767098] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 664.767332] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-429eb359-f173-4edf-8bb7-75ff99ddce8c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.774748] env[63345]: DEBUG oslo_vmware.api [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Waiting for the task: (returnval){ [ 664.774748] env[63345]: value = "task-1016751" [ 664.774748] env[63345]: _type = "Task" [ 664.774748] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.792462] env[63345]: DEBUG oslo_vmware.api [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1016748, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064022} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.792699] env[63345]: DEBUG oslo_vmware.api [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Task: {'id': task-1016751, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.793273] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 664.794325] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df7e120f-78ea-44eb-922f-74c49dc3ec92 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.819836] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Reconfiguring VM instance instance-00000020 to attach disk [datastore2] 070a834d-6478-4705-8df0-2a27c8780507/070a834d-6478-4705-8df0-2a27c8780507.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 664.820156] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-38d85917-e804-49be-9e41-85585bc21d0d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.841228] env[63345]: DEBUG oslo_vmware.api [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Waiting for the task: (returnval){ [ 664.841228] env[63345]: value = "task-1016752" [ 664.841228] env[63345]: _type = "Task" [ 664.841228] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.851615] env[63345]: DEBUG oslo_vmware.api [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1016752, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.903877] env[63345]: DEBUG oslo_vmware.api [None req-ef38e3d5-dbac-4904-a1a3-8bb19f5d01da tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Task: {'id': task-1016750, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.132047} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.904456] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef38e3d5-dbac-4904-a1a3-8bb19f5d01da tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 664.904687] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ef38e3d5-dbac-4904-a1a3-8bb19f5d01da tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] [instance: 04fd7aaa-658d-480d-8465-825f120477bc] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 664.904872] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ef38e3d5-dbac-4904-a1a3-8bb19f5d01da tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] [instance: 04fd7aaa-658d-480d-8465-825f120477bc] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 664.905063] env[63345]: INFO nova.compute.manager [None req-ef38e3d5-dbac-4904-a1a3-8bb19f5d01da tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] [instance: 04fd7aaa-658d-480d-8465-825f120477bc] Took 1.14 seconds to destroy the instance on the hypervisor. [ 664.905306] env[63345]: DEBUG oslo.service.loopingcall [None req-ef38e3d5-dbac-4904-a1a3-8bb19f5d01da tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 664.905602] env[63345]: DEBUG nova.compute.manager [-] [instance: 04fd7aaa-658d-480d-8465-825f120477bc] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 664.905701] env[63345]: DEBUG nova.network.neutron [-] [instance: 04fd7aaa-658d-480d-8465-825f120477bc] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 664.943336] env[63345]: DEBUG oslo_vmware.api [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52481025-793e-d4e3-3783-8a4f6957528f, 'name': SearchDatastore_Task, 'duration_secs': 0.008534} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.944083] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4a1550b6-f71b-4c42-a206-9d5fef1a8096 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.950084] env[63345]: DEBUG oslo_vmware.api [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Waiting for the task: (returnval){ [ 664.950084] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52395c2b-833a-383c-12db-d9d440a9e0c1" [ 664.950084] env[63345]: _type = "Task" [ 664.950084] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.959761] env[63345]: DEBUG oslo_vmware.api [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52395c2b-833a-383c-12db-d9d440a9e0c1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.959761] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance abc81fa5-78a9-48b1-a49e-2faffddf2411 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 665.078361] env[63345]: DEBUG nova.network.neutron [req-14342ba2-ff4c-40a3-8947-6ffe8de94c39 req-4e807550-4049-4b7d-ab20-e9f9822c9279 service nova] [instance: 4d41f4a7-4fde-4d34-be7c-533c00fe5ae6] Updated VIF entry in instance network info cache for port b7ded7af-2b2d-4b3a-8d80-03678b1e05cb. {{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 665.078627] env[63345]: DEBUG nova.network.neutron [req-14342ba2-ff4c-40a3-8947-6ffe8de94c39 req-4e807550-4049-4b7d-ab20-e9f9822c9279 service nova] [instance: 4d41f4a7-4fde-4d34-be7c-533c00fe5ae6] Updating instance_info_cache with network_info: [{"id": "b7ded7af-2b2d-4b3a-8d80-03678b1e05cb", "address": "fa:16:3e:f1:56:0e", "network": {"id": "da04e776-e1c6-4aba-a5fe-45cc0e02c8a7", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-76137959-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5e485a32dc3245458cc463dc819d05e7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7c80243e-93a7-4a95-bc8d-e9534bacd66e", "external-id": "nsx-vlan-transportzone-306", "segmentation_id": 306, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb7ded7af-2b", "ovs_interfaceid": "b7ded7af-2b2d-4b3a-8d80-03678b1e05cb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 665.106733] env[63345]: DEBUG oslo_concurrency.lockutils [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 665.287276] env[63345]: DEBUG oslo_vmware.api [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 
tempest-MigrationsAdminTest-1586795887-project-member] Task: {'id': task-1016751, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.352449] env[63345]: DEBUG oslo_vmware.api [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1016752, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.464954] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 665.466350] env[63345]: DEBUG oslo_vmware.api [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52395c2b-833a-383c-12db-d9d440a9e0c1, 'name': SearchDatastore_Task, 'duration_secs': 0.039687} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.466805] env[63345]: DEBUG oslo_concurrency.lockutils [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 665.467070] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 4d41f4a7-4fde-4d34-be7c-533c00fe5ae6/4d41f4a7-4fde-4d34-be7c-533c00fe5ae6.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 665.467328] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-aeb61eb6-7541-4b61-83ad-0ef4ef2abcbf {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.474415] env[63345]: DEBUG oslo_vmware.api [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Waiting for the task: (returnval){ [ 665.474415] env[63345]: value = "task-1016753" [ 665.474415] env[63345]: _type = "Task" [ 665.474415] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.486160] env[63345]: DEBUG oslo_vmware.api [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Task: {'id': task-1016753, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.522704] env[63345]: DEBUG nova.network.neutron [req-99c35b3e-ad85-449f-af51-4ab8c4144338 req-bc5aafca-c8f0-4c04-984a-b7172cb2ae99 service nova] [instance: 46d3332a-bfb9-4812-8201-a87467ce5151] Updated VIF entry in instance network info cache for port 6fb848af-6632-4cdf-847d-138fe30c4a08. {{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 665.523106] env[63345]: DEBUG nova.network.neutron [req-99c35b3e-ad85-449f-af51-4ab8c4144338 req-bc5aafca-c8f0-4c04-984a-b7172cb2ae99 service nova] [instance: 46d3332a-bfb9-4812-8201-a87467ce5151] Updating instance_info_cache with network_info: [{"id": "6fb848af-6632-4cdf-847d-138fe30c4a08", "address": "fa:16:3e:c8:a8:d4", "network": {"id": "0a7828e5-642e-4e9f-af2d-cf4de69f3d8a", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-2048304741-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.204", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "621255ab615842da94141f5949873177", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2f7abe5d-b8fe-4983-bd50-e7469f1fe7f3", "external-id": "nsx-vlan-transportzone-263", "segmentation_id": 263, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6fb848af-66", "ovs_interfaceid": "6fb848af-6632-4cdf-847d-138fe30c4a08", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 665.581742] env[63345]: DEBUG oslo_concurrency.lockutils [req-14342ba2-ff4c-40a3-8947-6ffe8de94c39 req-4e807550-4049-4b7d-ab20-e9f9822c9279 service nova] Releasing lock "refresh_cache-4d41f4a7-4fde-4d34-be7c-533c00fe5ae6" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 665.678038] env[63345]: DEBUG nova.network.neutron [-] [instance: 04fd7aaa-658d-480d-8465-825f120477bc] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 665.791193] env[63345]: DEBUG oslo_vmware.api [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Task: {'id': task-1016751, 'name': PowerOnVM_Task, 'duration_secs': 0.71979} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.791193] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 665.791193] env[63345]: INFO nova.compute.manager [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Took 10.06 seconds to spawn the instance on the hypervisor. [ 665.791193] env[63345]: DEBUG nova.compute.manager [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 665.791193] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a6fa768-0dfd-472b-a5d3-8587cdc4ac74 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.852249] env[63345]: DEBUG oslo_vmware.api [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1016752, 'name': ReconfigVM_Task, 'duration_secs': 0.643773} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.852528] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Reconfigured VM instance instance-00000020 to attach disk [datastore2] 070a834d-6478-4705-8df0-2a27c8780507/070a834d-6478-4705-8df0-2a27c8780507.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 665.853179] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-aa0c50a5-641f-47a7-a57f-7e2ad797ab67 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.859088] env[63345]: DEBUG oslo_vmware.api [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Waiting for the task: (returnval){ [ 665.859088] env[63345]: value = "task-1016754" [ 665.859088] env[63345]: _type = "Task" [ 665.859088] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.866981] env[63345]: DEBUG oslo_vmware.api [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1016754, 'name': Rename_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.968315] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 34e0234c-36c4-4878-979b-46f045bd1785 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 665.987170] env[63345]: DEBUG oslo_vmware.api [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Task: {'id': task-1016753, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.026827] env[63345]: DEBUG oslo_concurrency.lockutils [req-99c35b3e-ad85-449f-af51-4ab8c4144338 req-bc5aafca-c8f0-4c04-984a-b7172cb2ae99 service nova] Releasing lock "refresh_cache-46d3332a-bfb9-4812-8201-a87467ce5151" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 666.182778] env[63345]: INFO nova.compute.manager [-] [instance: 04fd7aaa-658d-480d-8465-825f120477bc] Took 1.28 seconds to deallocate network for instance. [ 666.306878] env[63345]: INFO nova.compute.manager [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Took 28.79 seconds to build instance. [ 666.370027] env[63345]: DEBUG oslo_vmware.api [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1016754, 'name': Rename_Task, 'duration_secs': 0.185635} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.370137] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 666.370374] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-39229eb9-3065-4f44-9d50-27e135f2cb9f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.376640] env[63345]: DEBUG oslo_vmware.api [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Waiting for the task: (returnval){ [ 666.376640] env[63345]: value = "task-1016755" [ 666.376640] env[63345]: _type = "Task" [ 666.376640] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.384341] env[63345]: DEBUG oslo_vmware.api [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1016755, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.471932] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 85fb1ecd-4ca3-401d-a87a-131f0b275506 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 666.487036] env[63345]: DEBUG oslo_vmware.api [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Task: {'id': task-1016753, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.774488} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.487036] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 4d41f4a7-4fde-4d34-be7c-533c00fe5ae6/4d41f4a7-4fde-4d34-be7c-533c00fe5ae6.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 666.487036] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] [instance: 4d41f4a7-4fde-4d34-be7c-533c00fe5ae6] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 666.487036] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ab85eaa5-1277-471f-bc1e-84102a61c1db {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.493722] env[63345]: DEBUG oslo_vmware.api [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Waiting for the task: (returnval){ [ 666.493722] env[63345]: value = "task-1016756" [ 666.493722] env[63345]: _type = "Task" [ 666.493722] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.501244] env[63345]: DEBUG oslo_vmware.api [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Task: {'id': task-1016756, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.691063] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ef38e3d5-dbac-4904-a1a3-8bb19f5d01da tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 666.810065] env[63345]: DEBUG oslo_concurrency.lockutils [None req-08b139e1-fd9d-4dac-bacd-78e7e1af87de tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Lock "27c6dc17-4ded-4fe7-8fba-265eae64fc32" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 108.777s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 666.888595] env[63345]: DEBUG oslo_vmware.api [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1016755, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.956977] env[63345]: DEBUG nova.compute.manager [req-c3e7cddd-c138-4af2-84f9-0992e4c916bb req-0591fd8e-e68d-4d54-94b6-da985f9303af service nova] [instance: 04fd7aaa-658d-480d-8465-825f120477bc] Received event network-vif-deleted-d9fdd10f-fc7f-4c46-9538-13164c07d369 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 666.974845] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 3101726f-5b14-417e-bcf8-390ce1f9b467 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 667.003360] env[63345]: DEBUG oslo_vmware.api [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Task: {'id': task-1016756, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066498} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 667.003664] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] [instance: 4d41f4a7-4fde-4d34-be7c-533c00fe5ae6] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 667.004470] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-963429a9-b928-47c2-bdec-5accfa438b20 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.027126] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] [instance: 4d41f4a7-4fde-4d34-be7c-533c00fe5ae6] Reconfiguring VM instance instance-00000021 to attach disk [datastore2] 4d41f4a7-4fde-4d34-be7c-533c00fe5ae6/4d41f4a7-4fde-4d34-be7c-533c00fe5ae6.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 667.027948] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-86454d29-9353-4176-8b7b-72d25470b0c3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.050515] env[63345]: DEBUG oslo_vmware.api [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Waiting for the task: (returnval){ [ 667.050515] env[63345]: value = "task-1016757" [ 667.050515] env[63345]: _type = "Task" [ 667.050515] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.065383] env[63345]: DEBUG oslo_vmware.api [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Task: {'id': task-1016757, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.313202] env[63345]: DEBUG nova.compute.manager [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] [instance: 30755716-03a7-41bd-90c2-7ef21baf9975] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 667.388577] env[63345]: DEBUG oslo_vmware.api [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1016755, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.478468] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 64fcf837-1d9d-41b1-a2a1-3c16362932cf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 667.561745] env[63345]: DEBUG oslo_vmware.api [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Task: {'id': task-1016757, 'name': ReconfigVM_Task, 'duration_secs': 0.322578} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 667.563055] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] [instance: 4d41f4a7-4fde-4d34-be7c-533c00fe5ae6] Reconfigured VM instance instance-00000021 to attach disk [datastore2] 4d41f4a7-4fde-4d34-be7c-533c00fe5ae6/4d41f4a7-4fde-4d34-be7c-533c00fe5ae6.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 667.563055] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ef886c32-4340-4cca-9f81-0ec576bfa036 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.569315] env[63345]: DEBUG oslo_vmware.api [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Waiting for the task: (returnval){ [ 667.569315] env[63345]: value = "task-1016758" [ 667.569315] env[63345]: _type = "Task" [ 667.569315] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.578561] env[63345]: DEBUG oslo_vmware.api [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Task: {'id': task-1016758, 'name': Rename_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.837228] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 667.888990] env[63345]: DEBUG oslo_vmware.api [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1016755, 'name': PowerOnVM_Task, 'duration_secs': 1.128362} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 667.889281] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 667.889477] env[63345]: INFO nova.compute.manager [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Took 9.59 seconds to spawn the instance on the hypervisor. [ 667.889650] env[63345]: DEBUG nova.compute.manager [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 667.890439] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-968f7485-ee20-40a3-a302-34bf3553266e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.956740] env[63345]: DEBUG nova.compute.manager [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Stashing vm_state: active {{(pid=63345) _prep_resize /opt/stack/nova/nova/compute/manager.py:5953}} [ 667.982715] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 3b0d115d-dad5-4881-a0e0-b98f555da533 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 668.080134] env[63345]: DEBUG oslo_vmware.api [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Task: {'id': task-1016758, 'name': Rename_Task, 'duration_secs': 0.140556} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 668.080449] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] [instance: 4d41f4a7-4fde-4d34-be7c-533c00fe5ae6] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 668.080724] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7f865f43-a255-4533-b1ef-b92b3bbb64da {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.089023] env[63345]: DEBUG oslo_vmware.api [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Waiting for the task: (returnval){ [ 668.089023] env[63345]: value = "task-1016759" [ 668.089023] env[63345]: _type = "Task" [ 668.089023] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.098613] env[63345]: DEBUG oslo_vmware.api [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Task: {'id': task-1016759, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.410374] env[63345]: INFO nova.compute.manager [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Took 29.37 seconds to build instance. [ 668.482983] env[63345]: DEBUG oslo_concurrency.lockutils [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 668.484865] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 4a59b565-571f-48ef-97bd-bed9853e2d8e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 668.597970] env[63345]: DEBUG oslo_vmware.api [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Task: {'id': task-1016759, 'name': PowerOnVM_Task, 'duration_secs': 0.499584} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 668.598341] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] [instance: 4d41f4a7-4fde-4d34-be7c-533c00fe5ae6] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 668.598546] env[63345]: INFO nova.compute.manager [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] [instance: 4d41f4a7-4fde-4d34-be7c-533c00fe5ae6] Took 7.70 seconds to spawn the instance on the hypervisor. [ 668.598733] env[63345]: DEBUG nova.compute.manager [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] [instance: 4d41f4a7-4fde-4d34-be7c-533c00fe5ae6] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 668.599625] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6eac5cd5-ab13-41c9-bed5-e6fed3901117 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.912509] env[63345]: DEBUG oslo_concurrency.lockutils [None req-36d54760-824a-444d-a3cc-f56897703086 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Lock "070a834d-6478-4705-8df0-2a27c8780507" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 107.041s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 668.987363] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance d3e99100-f13f-4019-9b5a-adaa65dacc5f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 669.120435] env[63345]: INFO nova.compute.manager [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] [instance: 4d41f4a7-4fde-4d34-be7c-533c00fe5ae6] Took 29.44 seconds to build instance. 
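The entries above trace the VMware spawn path for instance 4d41f4a7-4fde-4d34-be7c-533c00fe5ae6 (instance-00000021): CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task and PowerOnVM_Task are each issued against vCenter and then polled by _poll_task until they report "completed successfully". The following is a minimal, self-contained Python sketch of that poll-until-complete pattern only, not Nova's or oslo.vmware's actual code; get_task_state and the fixed task list are hypothetical stand-ins for the PropertyCollector reads that oslo_vmware.api performs while waiting on a task.

```python
import itertools
import time

# Hypothetical stand-in for the vCenter PropertyCollector read done on each
# poll; it simply reports "success" after a few polls so the sketch runs on
# its own without a vCenter endpoint.
_poll_counts = {}

def get_task_state(task_id):
    counter = _poll_counts.setdefault(task_id, itertools.count())
    return "success" if next(counter) >= 2 else "running"

def wait_for_task(task_id, poll_interval=0.5):
    """Poll a task until it completes, mirroring the
    'Waiting for the task ... progress is N%' / 'completed successfully'
    pairs seen in the log above."""
    while True:
        state = get_task_state(task_id)
        if state == "success":
            print(f"Task {task_id} completed successfully.")
            return
        if state == "error":
            raise RuntimeError(f"Task {task_id} failed")
        print(f"Task {task_id} still {state}, polling again.")
        time.sleep(poll_interval)

# The spawn sequence observed for instance-00000021 in the log entries above.
for task in ("CopyVirtualDisk_Task", "ExtendVirtualDisk_Task",
             "ReconfigVM_Task", "Rename_Task", "PowerOnVM_Task"):
    wait_for_task(task, poll_interval=0.01)
```

In the log, the task identifiers (task-1016753 through task-1016759) are returned by the Invoking VirtualMachine.*_Task / VirtualDiskManager.*_Task calls, and the spawn only proceeds to the next step once the current task has completed, which is why each "Waiting for the task" block is followed by one or more progress polls before the success record.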
[ 669.214867] env[63345]: DEBUG oslo_concurrency.lockutils [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Acquiring lock "bcec23fe-75c7-479e-9210-85ca6781d7e5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 669.215134] env[63345]: DEBUG oslo_concurrency.lockutils [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Lock "bcec23fe-75c7-479e-9210-85ca6781d7e5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 669.415888] env[63345]: DEBUG nova.compute.manager [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 869f8110-6490-4a47-955a-0ce085f826af] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 669.490243] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 6cbe136b-5bf6-4f17-bcef-b712d850615f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 669.622116] env[63345]: DEBUG oslo_concurrency.lockutils [None req-283b0442-d7ad-4e86-816d-caafeb0fa02b tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Lock "4d41f4a7-4fde-4d34-be7c-533c00fe5ae6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 105.045s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 669.941667] env[63345]: DEBUG oslo_concurrency.lockutils [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 669.992741] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 778faa4f-4c5f-4ec2-b17b-5d7513c9c218 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 670.126617] env[63345]: DEBUG nova.compute.manager [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] [instance: abc81fa5-78a9-48b1-a49e-2faffddf2411] Starting instance... 
{{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 670.209360] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a2551010-1e6c-414f-b6b5-e43b698e5890 tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Acquiring lock "4d41f4a7-4fde-4d34-be7c-533c00fe5ae6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 670.209640] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a2551010-1e6c-414f-b6b5-e43b698e5890 tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Lock "4d41f4a7-4fde-4d34-be7c-533c00fe5ae6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 670.209945] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a2551010-1e6c-414f-b6b5-e43b698e5890 tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Acquiring lock "4d41f4a7-4fde-4d34-be7c-533c00fe5ae6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 670.210175] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a2551010-1e6c-414f-b6b5-e43b698e5890 tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Lock "4d41f4a7-4fde-4d34-be7c-533c00fe5ae6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 670.210366] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a2551010-1e6c-414f-b6b5-e43b698e5890 tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Lock "4d41f4a7-4fde-4d34-be7c-533c00fe5ae6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 670.213243] env[63345]: INFO nova.compute.manager [None req-a2551010-1e6c-414f-b6b5-e43b698e5890 tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] [instance: 4d41f4a7-4fde-4d34-be7c-533c00fe5ae6] Terminating instance [ 670.496176] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 93112cc1-f9a1-4188-9555-bddf483426a1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 670.646347] env[63345]: DEBUG oslo_concurrency.lockutils [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 670.717361] env[63345]: DEBUG nova.compute.manager [None req-a2551010-1e6c-414f-b6b5-e43b698e5890 tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] [instance: 4d41f4a7-4fde-4d34-be7c-533c00fe5ae6] Start destroying the instance on the hypervisor. {{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 670.717595] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a2551010-1e6c-414f-b6b5-e43b698e5890 tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] [instance: 4d41f4a7-4fde-4d34-be7c-533c00fe5ae6] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 670.718574] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6ee7c01-8c6c-4a47-9284-ae280c7ac7b3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.726837] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2551010-1e6c-414f-b6b5-e43b698e5890 tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] [instance: 4d41f4a7-4fde-4d34-be7c-533c00fe5ae6] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 670.727084] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-68e6b090-b475-40c3-946a-c2e9d3b526b5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.733784] env[63345]: DEBUG oslo_vmware.api [None req-a2551010-1e6c-414f-b6b5-e43b698e5890 tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Waiting for the task: (returnval){ [ 670.733784] env[63345]: value = "task-1016760" [ 670.733784] env[63345]: _type = "Task" [ 670.733784] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.742181] env[63345]: DEBUG oslo_vmware.api [None req-a2551010-1e6c-414f-b6b5-e43b698e5890 tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Task: {'id': task-1016760, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.999422] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance e3d52cbd-e768-4425-b83e-180a6e58fd00 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 671.244716] env[63345]: DEBUG oslo_vmware.api [None req-a2551010-1e6c-414f-b6b5-e43b698e5890 tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Task: {'id': task-1016760, 'name': PowerOffVM_Task, 'duration_secs': 0.280927} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 671.244986] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2551010-1e6c-414f-b6b5-e43b698e5890 tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] [instance: 4d41f4a7-4fde-4d34-be7c-533c00fe5ae6] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 671.245204] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a2551010-1e6c-414f-b6b5-e43b698e5890 tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] [instance: 4d41f4a7-4fde-4d34-be7c-533c00fe5ae6] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 671.245464] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-19f1eed3-6d21-4256-9075-98d4cbd8c50e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.322057] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a2551010-1e6c-414f-b6b5-e43b698e5890 tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] [instance: 4d41f4a7-4fde-4d34-be7c-533c00fe5ae6] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 671.322443] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a2551010-1e6c-414f-b6b5-e43b698e5890 tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] [instance: 4d41f4a7-4fde-4d34-be7c-533c00fe5ae6] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 671.322664] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-a2551010-1e6c-414f-b6b5-e43b698e5890 tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Deleting the datastore file [datastore2] 4d41f4a7-4fde-4d34-be7c-533c00fe5ae6 {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 671.322944] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2bf2b845-76cc-424a-9958-0ea5ef2d5306 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.329919] env[63345]: DEBUG oslo_vmware.api [None req-a2551010-1e6c-414f-b6b5-e43b698e5890 tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Waiting for the task: (returnval){ [ 671.329919] env[63345]: value = "task-1016762" [ 671.329919] env[63345]: _type = "Task" [ 671.329919] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 671.337814] env[63345]: DEBUG oslo_vmware.api [None req-a2551010-1e6c-414f-b6b5-e43b698e5890 tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Task: {'id': task-1016762, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.503080] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 3a85df04-3997-48a3-8992-f24fe997b3cc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 671.839673] env[63345]: DEBUG oslo_vmware.api [None req-a2551010-1e6c-414f-b6b5-e43b698e5890 tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Task: {'id': task-1016762, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.005877] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance b5173471-3367-42ba-b450-62ad8573f048 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 672.006195] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Total usable vcpus: 48, total allocated vcpus: 8 {{(pid=63345) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 672.006348] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2048MB phys_disk=200GB used_disk=8GB total_vcpus=48 used_vcpus=8 pci_stats=[] {{(pid=63345) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 672.340969] env[63345]: DEBUG oslo_vmware.api [None req-a2551010-1e6c-414f-b6b5-e43b698e5890 tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Task: {'id': task-1016762, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.939922} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.343453] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-a2551010-1e6c-414f-b6b5-e43b698e5890 tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 672.343662] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a2551010-1e6c-414f-b6b5-e43b698e5890 tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] [instance: 4d41f4a7-4fde-4d34-be7c-533c00fe5ae6] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 672.343844] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a2551010-1e6c-414f-b6b5-e43b698e5890 tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] [instance: 4d41f4a7-4fde-4d34-be7c-533c00fe5ae6] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 672.344035] env[63345]: INFO nova.compute.manager [None req-a2551010-1e6c-414f-b6b5-e43b698e5890 tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] [instance: 4d41f4a7-4fde-4d34-be7c-533c00fe5ae6] Took 1.63 seconds to destroy the instance on the hypervisor. [ 672.344278] env[63345]: DEBUG oslo.service.loopingcall [None req-a2551010-1e6c-414f-b6b5-e43b698e5890 tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 672.344646] env[63345]: DEBUG nova.compute.manager [-] [instance: 4d41f4a7-4fde-4d34-be7c-533c00fe5ae6] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 672.344749] env[63345]: DEBUG nova.network.neutron [-] [instance: 4d41f4a7-4fde-4d34-be7c-533c00fe5ae6] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 672.430365] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df35fc8b-d196-4e24-b783-7821605c0e78 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.438063] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f4bceff-2a65-43d3-ac09-e3b5330a66f8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.468132] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6b1bdac-27bf-46f8-93f5-497c78c9047d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.475400] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ca6e7df-17b0-4305-930f-4073d4b2e7ae {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.488751] env[63345]: DEBUG nova.compute.provider_tree [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 672.652952] env[63345]: DEBUG nova.compute.manager [req-dfcfbd10-2d78-4673-90b4-eb10d0ff9097 req-6ee85890-9899-47ba-aec1-88c608a97b11 service nova] [instance: 4d41f4a7-4fde-4d34-be7c-533c00fe5ae6] Received event network-vif-deleted-b7ded7af-2b2d-4b3a-8d80-03678b1e05cb {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 672.653222] env[63345]: INFO nova.compute.manager [req-dfcfbd10-2d78-4673-90b4-eb10d0ff9097 req-6ee85890-9899-47ba-aec1-88c608a97b11 service nova] [instance: 4d41f4a7-4fde-4d34-be7c-533c00fe5ae6] Neutron deleted interface b7ded7af-2b2d-4b3a-8d80-03678b1e05cb; detaching it from the instance and deleting it from the info cache [ 672.653340] env[63345]: DEBUG nova.network.neutron [req-dfcfbd10-2d78-4673-90b4-eb10d0ff9097 req-6ee85890-9899-47ba-aec1-88c608a97b11 service nova] [instance: 4d41f4a7-4fde-4d34-be7c-533c00fe5ae6] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 672.992212] env[63345]: DEBUG nova.scheduler.client.report [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 673.091638] env[63345]: DEBUG nova.network.neutron [-] [instance: 4d41f4a7-4fde-4d34-be7c-533c00fe5ae6] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 673.155885] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0ddce327-350c-4e1a-a57f-a1294de7850c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.165830] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b7fa21e-2265-40f2-863d-8fc39a35509b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.199032] env[63345]: DEBUG nova.compute.manager [req-dfcfbd10-2d78-4673-90b4-eb10d0ff9097 req-6ee85890-9899-47ba-aec1-88c608a97b11 service nova] [instance: 4d41f4a7-4fde-4d34-be7c-533c00fe5ae6] Detach interface failed, port_id=b7ded7af-2b2d-4b3a-8d80-03678b1e05cb, reason: Instance 4d41f4a7-4fde-4d34-be7c-533c00fe5ae6 could not be found. {{(pid=63345) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 673.497584] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63345) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 673.497952] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 14.627s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 673.498193] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.613s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 673.499923] env[63345]: INFO nova.compute.claims [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 673.593882] env[63345]: INFO nova.compute.manager [-] [instance: 4d41f4a7-4fde-4d34-be7c-533c00fe5ae6] Took 1.25 seconds to deallocate network for instance. 
[ 674.101063] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a2551010-1e6c-414f-b6b5-e43b698e5890 tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 674.910014] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63fe5ead-a319-49aa-99bc-fcab3cbb5692 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.918107] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9d20869-41cb-4d68-85fc-0e51e4f5dd7a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.949228] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcf82c9f-49bb-4561-aa97-fb3dca7f72aa {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.956626] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a01c0a3-4d1c-4b78-ab80-40a19398fb61 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.970380] env[63345]: DEBUG nova.compute.provider_tree [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 675.473864] env[63345]: DEBUG nova.scheduler.client.report [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 675.981981] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.484s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 675.982864] env[63345]: DEBUG nova.compute.manager [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] Start building networks asynchronously for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 675.985257] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.986s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 675.988218] env[63345]: INFO nova.compute.claims [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 3e4e58bd-903b-4b3d-8be4-5678aab6c721] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 676.493558] env[63345]: DEBUG nova.compute.utils [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 676.497916] env[63345]: DEBUG nova.compute.manager [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] Allocating IP information in the background. {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 676.498193] env[63345]: DEBUG nova.network.neutron [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 676.540766] env[63345]: DEBUG nova.policy [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7ddd789e894242559ad0ad1974e57ff8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '87516cd599534b94801951669a97a9e0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 676.812498] env[63345]: DEBUG nova.network.neutron [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] Successfully created port: 0ae421d2-83f3-4520-8a37-01cb6a91a3f5 {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 677.001479] env[63345]: DEBUG nova.compute.manager [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] Start building block device mappings for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 677.447317] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fb68e78-2623-440f-a07b-4c2edd0bf4a7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.455667] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efe21e58-cb84-4d4b-b2c2-bcf29fcc2270 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.484855] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c885c834-1bd0-41d4-806f-c21ee81c2d3b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.491909] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5a41a5c-c03d-4d01-8f00-a3c46907c288 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.510171] env[63345]: DEBUG nova.compute.provider_tree [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 678.013360] env[63345]: DEBUG nova.compute.manager [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] Start spawning the instance on the hypervisor. 
{{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 678.015943] env[63345]: DEBUG nova.scheduler.client.report [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 678.041521] env[63345]: DEBUG nova.virt.hardware [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 678.041761] env[63345]: DEBUG nova.virt.hardware [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 678.041917] env[63345]: DEBUG nova.virt.hardware [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 678.042119] env[63345]: DEBUG nova.virt.hardware [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 678.042265] env[63345]: DEBUG nova.virt.hardware [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 678.042413] env[63345]: DEBUG nova.virt.hardware [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 678.042618] env[63345]: DEBUG nova.virt.hardware [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 678.042778] env[63345]: DEBUG nova.virt.hardware [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 678.042946] env[63345]: DEBUG nova.virt.hardware [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 678.043123] env[63345]: DEBUG nova.virt.hardware [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 678.043299] env[63345]: DEBUG nova.virt.hardware [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 678.044372] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efa1d937-c206-41f4-8a0e-907735acc0b7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.052345] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23cc0eea-bc92-4db3-86d0-0ebf5655843c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.317494] env[63345]: DEBUG nova.compute.manager [req-fe348945-3c1f-4fdc-84e4-a6314ad21988 req-2bcf6eed-9a31-40b4-96ab-20a1d872768e service nova] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] Received event network-vif-plugged-0ae421d2-83f3-4520-8a37-01cb6a91a3f5 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 678.317677] env[63345]: DEBUG oslo_concurrency.lockutils [req-fe348945-3c1f-4fdc-84e4-a6314ad21988 req-2bcf6eed-9a31-40b4-96ab-20a1d872768e service nova] Acquiring lock "805f9143-a8d8-4995-a20d-3b10ef3ab599-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 678.317913] env[63345]: DEBUG oslo_concurrency.lockutils [req-fe348945-3c1f-4fdc-84e4-a6314ad21988 req-2bcf6eed-9a31-40b4-96ab-20a1d872768e service nova] Lock "805f9143-a8d8-4995-a20d-3b10ef3ab599-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 678.318100] env[63345]: DEBUG oslo_concurrency.lockutils [req-fe348945-3c1f-4fdc-84e4-a6314ad21988 req-2bcf6eed-9a31-40b4-96ab-20a1d872768e service nova] Lock "805f9143-a8d8-4995-a20d-3b10ef3ab599-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 678.318276] env[63345]: DEBUG nova.compute.manager [req-fe348945-3c1f-4fdc-84e4-a6314ad21988 req-2bcf6eed-9a31-40b4-96ab-20a1d872768e service nova] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] No waiting events found dispatching network-vif-plugged-0ae421d2-83f3-4520-8a37-01cb6a91a3f5 {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 678.318439] env[63345]: WARNING nova.compute.manager [req-fe348945-3c1f-4fdc-84e4-a6314ad21988 req-2bcf6eed-9a31-40b4-96ab-20a1d872768e service nova] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] Received unexpected event network-vif-plugged-0ae421d2-83f3-4520-8a37-01cb6a91a3f5 for instance with vm_state building and task_state spawning. [ 678.356018] env[63345]: DEBUG nova.network.neutron [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] Successfully updated port: 0ae421d2-83f3-4520-8a37-01cb6a91a3f5 {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 678.524568] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.539s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 678.525130] env[63345]: DEBUG nova.compute.manager [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 3e4e58bd-903b-4b3d-8be4-5678aab6c721] Start building networks asynchronously for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 678.528032] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.486s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 678.529436] env[63345]: INFO nova.compute.claims [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: e6bc8cb9-2f1a-49cb-974d-ea9a211126ee] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 678.862059] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Acquiring lock "refresh_cache-805f9143-a8d8-4995-a20d-3b10ef3ab599" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 678.862270] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Acquired lock "refresh_cache-805f9143-a8d8-4995-a20d-3b10ef3ab599" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 678.862389] env[63345]: DEBUG nova.network.neutron [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 679.033961] env[63345]: DEBUG nova.compute.utils [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 679.038632] env[63345]: DEBUG nova.compute.manager [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 3e4e58bd-903b-4b3d-8be4-5678aab6c721] Allocating IP information in the background. 
{{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 679.038632] env[63345]: DEBUG nova.network.neutron [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 3e4e58bd-903b-4b3d-8be4-5678aab6c721] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 679.084579] env[63345]: DEBUG nova.policy [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '92bccb8029854f0ea2cef0747513b8bb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a1f3a565957a4316af1b8fa14f81e75a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 679.329351] env[63345]: DEBUG nova.network.neutron [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 3e4e58bd-903b-4b3d-8be4-5678aab6c721] Successfully created port: 1e56115d-0d69-4b50-8607-b08677046c73 {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 679.427098] env[63345]: DEBUG nova.network.neutron [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 679.538769] env[63345]: DEBUG nova.compute.manager [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 3e4e58bd-903b-4b3d-8be4-5678aab6c721] Start building block device mappings for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 679.631899] env[63345]: DEBUG nova.network.neutron [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] Updating instance_info_cache with network_info: [{"id": "0ae421d2-83f3-4520-8a37-01cb6a91a3f5", "address": "fa:16:3e:3e:4a:12", "network": {"id": "a7e35920-e04b-4676-9e3a-a322585417c0", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1167528582-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87516cd599534b94801951669a97a9e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d33839ae-40ca-471b-92e3-eb282b920682", "external-id": "nsx-vlan-transportzone-416", "segmentation_id": 416, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0ae421d2-83", "ovs_interfaceid": "0ae421d2-83f3-4520-8a37-01cb6a91a3f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 680.012997] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a98567b-1f84-42f4-8d48-eec4311d012b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.020514] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dfa115b-11b4-47eb-8ab8-df18e8c9e510 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.052993] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e042b6b-cfde-4649-a5ed-f25edf776ecf {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.060227] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b500bcf-491e-4245-b662-9e86397b9859 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.073516] env[63345]: DEBUG nova.compute.provider_tree [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 680.138590] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Releasing lock "refresh_cache-805f9143-a8d8-4995-a20d-3b10ef3ab599" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 680.138920] 
env[63345]: DEBUG nova.compute.manager [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] Instance network_info: |[{"id": "0ae421d2-83f3-4520-8a37-01cb6a91a3f5", "address": "fa:16:3e:3e:4a:12", "network": {"id": "a7e35920-e04b-4676-9e3a-a322585417c0", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1167528582-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87516cd599534b94801951669a97a9e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d33839ae-40ca-471b-92e3-eb282b920682", "external-id": "nsx-vlan-transportzone-416", "segmentation_id": 416, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0ae421d2-83", "ovs_interfaceid": "0ae421d2-83f3-4520-8a37-01cb6a91a3f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 680.139362] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3e:4a:12', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd33839ae-40ca-471b-92e3-eb282b920682', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0ae421d2-83f3-4520-8a37-01cb6a91a3f5', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 680.147253] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Creating folder: Project (87516cd599534b94801951669a97a9e0). Parent ref: group-v225918. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 680.147253] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-65508bde-ee8a-4b0d-802c-7d881deca1e1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.157782] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Created folder: Project (87516cd599534b94801951669a97a9e0) in parent group-v225918. [ 680.157983] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Creating folder: Instances. Parent ref: group-v225961. 
{{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 680.158294] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fb361f6e-e310-40a7-9917-44f4a135d8da {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.167181] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Created folder: Instances in parent group-v225961. [ 680.167438] env[63345]: DEBUG oslo.service.loopingcall [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 680.167599] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 680.167868] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-304b782c-416e-4d47-9bc6-c3c91f6a20ee {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.186367] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 680.186367] env[63345]: value = "task-1016765" [ 680.186367] env[63345]: _type = "Task" [ 680.186367] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 680.194224] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016765, 'name': CreateVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 680.346264] env[63345]: DEBUG nova.compute.manager [req-eef72164-e143-45b5-b07e-5d20a4fbb75c req-00f20014-8b1a-4fe8-b654-85d8b050e4ee service nova] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] Received event network-changed-0ae421d2-83f3-4520-8a37-01cb6a91a3f5 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 680.346264] env[63345]: DEBUG nova.compute.manager [req-eef72164-e143-45b5-b07e-5d20a4fbb75c req-00f20014-8b1a-4fe8-b654-85d8b050e4ee service nova] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] Refreshing instance network info cache due to event network-changed-0ae421d2-83f3-4520-8a37-01cb6a91a3f5. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 680.346264] env[63345]: DEBUG oslo_concurrency.lockutils [req-eef72164-e143-45b5-b07e-5d20a4fbb75c req-00f20014-8b1a-4fe8-b654-85d8b050e4ee service nova] Acquiring lock "refresh_cache-805f9143-a8d8-4995-a20d-3b10ef3ab599" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 680.346557] env[63345]: DEBUG oslo_concurrency.lockutils [req-eef72164-e143-45b5-b07e-5d20a4fbb75c req-00f20014-8b1a-4fe8-b654-85d8b050e4ee service nova] Acquired lock "refresh_cache-805f9143-a8d8-4995-a20d-3b10ef3ab599" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 680.346557] env[63345]: DEBUG nova.network.neutron [req-eef72164-e143-45b5-b07e-5d20a4fbb75c req-00f20014-8b1a-4fe8-b654-85d8b050e4ee service nova] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] Refreshing network info cache for port 0ae421d2-83f3-4520-8a37-01cb6a91a3f5 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 680.557834] env[63345]: DEBUG nova.compute.manager [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 3e4e58bd-903b-4b3d-8be4-5678aab6c721] Start spawning the instance on the hypervisor. {{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 680.576728] env[63345]: DEBUG nova.scheduler.client.report [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 680.583624] env[63345]: DEBUG nova.virt.hardware [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 680.585133] env[63345]: DEBUG nova.virt.hardware [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Flavor limits 0:0:0 
{{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 680.585133] env[63345]: DEBUG nova.virt.hardware [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 680.585133] env[63345]: DEBUG nova.virt.hardware [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 680.585133] env[63345]: DEBUG nova.virt.hardware [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 680.585133] env[63345]: DEBUG nova.virt.hardware [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 680.585407] env[63345]: DEBUG nova.virt.hardware [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 680.585407] env[63345]: DEBUG nova.virt.hardware [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 680.585407] env[63345]: DEBUG nova.virt.hardware [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 680.585407] env[63345]: DEBUG nova.virt.hardware [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 680.585407] env[63345]: DEBUG nova.virt.hardware [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 680.586106] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86ced618-8c5c-4e80-a2d3-8af40f59f940 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.596601] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-0c7a242e-bf04-4a76-8f89-ebfc1679b0d8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.696280] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016765, 'name': CreateVM_Task, 'duration_secs': 0.32398} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 680.696456] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 680.697158] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 680.697331] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 680.697661] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 680.697932] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-17f57600-1fa6-4c3c-8146-d6851f1bff83 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.703200] env[63345]: DEBUG oslo_vmware.api [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Waiting for the task: (returnval){ [ 680.703200] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52636ff3-5ddb-309e-6cff-9ab5fd2dad97" [ 680.703200] env[63345]: _type = "Task" [ 680.703200] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 680.710744] env[63345]: DEBUG oslo_vmware.api [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52636ff3-5ddb-309e-6cff-9ab5fd2dad97, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 680.825031] env[63345]: DEBUG nova.network.neutron [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 3e4e58bd-903b-4b3d-8be4-5678aab6c721] Successfully updated port: 1e56115d-0d69-4b50-8607-b08677046c73 {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 681.048964] env[63345]: DEBUG nova.network.neutron [req-eef72164-e143-45b5-b07e-5d20a4fbb75c req-00f20014-8b1a-4fe8-b654-85d8b050e4ee service nova] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] Updated VIF entry in instance network info cache for port 0ae421d2-83f3-4520-8a37-01cb6a91a3f5. {{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 681.049356] env[63345]: DEBUG nova.network.neutron [req-eef72164-e143-45b5-b07e-5d20a4fbb75c req-00f20014-8b1a-4fe8-b654-85d8b050e4ee service nova] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] Updating instance_info_cache with network_info: [{"id": "0ae421d2-83f3-4520-8a37-01cb6a91a3f5", "address": "fa:16:3e:3e:4a:12", "network": {"id": "a7e35920-e04b-4676-9e3a-a322585417c0", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1167528582-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87516cd599534b94801951669a97a9e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d33839ae-40ca-471b-92e3-eb282b920682", "external-id": "nsx-vlan-transportzone-416", "segmentation_id": 416, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0ae421d2-83", "ovs_interfaceid": "0ae421d2-83f3-4520-8a37-01cb6a91a3f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 681.081798] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.553s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 681.081915] env[63345]: DEBUG nova.compute.manager [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: e6bc8cb9-2f1a-49cb-974d-ea9a211126ee] Start building networks asynchronously for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 681.085021] env[63345]: DEBUG oslo_concurrency.lockutils [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.803s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 681.086068] env[63345]: INFO nova.compute.claims [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] [instance: 7bef089c-e93b-4ba6-a683-4e076489f92a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 681.213586] env[63345]: DEBUG oslo_vmware.api [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52636ff3-5ddb-309e-6cff-9ab5fd2dad97, 'name': SearchDatastore_Task, 'duration_secs': 0.010788} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 681.213892] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 681.214141] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 681.214379] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 681.214529] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 681.214708] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 681.214964] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-4156842b-2f78-46f9-847a-1a18b75d360c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.223532] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 681.223705] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 681.224413] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4d787061-b8f3-4a23-974a-0f80192b7fc8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.229089] env[63345]: DEBUG oslo_vmware.api [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Waiting for the task: (returnval){ [ 681.229089] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52d4a69f-2bff-917c-8ca2-200cc936216e" [ 681.229089] env[63345]: _type = "Task" [ 681.229089] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 681.236271] env[63345]: DEBUG oslo_vmware.api [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52d4a69f-2bff-917c-8ca2-200cc936216e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.327711] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Acquiring lock "refresh_cache-3e4e58bd-903b-4b3d-8be4-5678aab6c721" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 681.327922] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Acquired lock "refresh_cache-3e4e58bd-903b-4b3d-8be4-5678aab6c721" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 681.328032] env[63345]: DEBUG nova.network.neutron [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 3e4e58bd-903b-4b3d-8be4-5678aab6c721] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 681.551629] env[63345]: DEBUG oslo_concurrency.lockutils [req-eef72164-e143-45b5-b07e-5d20a4fbb75c req-00f20014-8b1a-4fe8-b654-85d8b050e4ee service nova] Releasing lock "refresh_cache-805f9143-a8d8-4995-a20d-3b10ef3ab599" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 681.590355] env[63345]: DEBUG nova.compute.utils [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 681.593625] env[63345]: DEBUG nova.compute.manager [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: e6bc8cb9-2f1a-49cb-974d-ea9a211126ee] Allocating IP information in the background. 
{{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 681.593957] env[63345]: DEBUG nova.network.neutron [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: e6bc8cb9-2f1a-49cb-974d-ea9a211126ee] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 681.648026] env[63345]: DEBUG nova.policy [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '92bccb8029854f0ea2cef0747513b8bb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a1f3a565957a4316af1b8fa14f81e75a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 681.739314] env[63345]: DEBUG oslo_vmware.api [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52d4a69f-2bff-917c-8ca2-200cc936216e, 'name': SearchDatastore_Task, 'duration_secs': 0.01173} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 681.740200] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1b1d1748-ba79-4a2d-bc7f-f55247ff5022 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.745617] env[63345]: DEBUG oslo_vmware.api [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Waiting for the task: (returnval){ [ 681.745617] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52576839-b701-6896-903a-ec8dc3bc1bd2" [ 681.745617] env[63345]: _type = "Task" [ 681.745617] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 681.754775] env[63345]: DEBUG oslo_vmware.api [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52576839-b701-6896-903a-ec8dc3bc1bd2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.858228] env[63345]: DEBUG nova.network.neutron [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 3e4e58bd-903b-4b3d-8be4-5678aab6c721] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 681.902558] env[63345]: DEBUG nova.network.neutron [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: e6bc8cb9-2f1a-49cb-974d-ea9a211126ee] Successfully created port: 829240e3-b053-450a-90f2-13fc659f12ca {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 682.073706] env[63345]: DEBUG nova.network.neutron [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 3e4e58bd-903b-4b3d-8be4-5678aab6c721] Updating instance_info_cache with network_info: [{"id": "1e56115d-0d69-4b50-8607-b08677046c73", "address": "fa:16:3e:98:00:76", "network": {"id": "1888e7dd-bfd6-49d6-afb2-6ba1b22314cc", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-70760398-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a1f3a565957a4316af1b8fa14f81e75a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0df968ae-c1ef-4009-a0f4-6f2e799c2fda", "external-id": "nsx-vlan-transportzone-864", "segmentation_id": 864, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e56115d-0d", "ovs_interfaceid": "1e56115d-0d69-4b50-8607-b08677046c73", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 682.094218] env[63345]: DEBUG nova.compute.manager [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: e6bc8cb9-2f1a-49cb-974d-ea9a211126ee] Start building block device mappings for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 682.256379] env[63345]: DEBUG oslo_vmware.api [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52576839-b701-6896-903a-ec8dc3bc1bd2, 'name': SearchDatastore_Task, 'duration_secs': 0.009615} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 682.258879] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 682.259156] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 805f9143-a8d8-4995-a20d-3b10ef3ab599/805f9143-a8d8-4995-a20d-3b10ef3ab599.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 682.259986] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c495ce17-89ff-4a2c-9cd3-9a3700f776d1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.267235] env[63345]: DEBUG oslo_vmware.api [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Waiting for the task: (returnval){ [ 682.267235] env[63345]: value = "task-1016766" [ 682.267235] env[63345]: _type = "Task" [ 682.267235] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.276724] env[63345]: DEBUG oslo_vmware.api [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Task: {'id': task-1016766, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.374305] env[63345]: DEBUG nova.compute.manager [req-aaff94cd-8d4b-465c-96ba-b5fcd20dfe71 req-ea0abebf-94e0-4a25-b0bc-aabd547e62c1 service nova] [instance: 3e4e58bd-903b-4b3d-8be4-5678aab6c721] Received event network-vif-plugged-1e56115d-0d69-4b50-8607-b08677046c73 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 682.374305] env[63345]: DEBUG oslo_concurrency.lockutils [req-aaff94cd-8d4b-465c-96ba-b5fcd20dfe71 req-ea0abebf-94e0-4a25-b0bc-aabd547e62c1 service nova] Acquiring lock "3e4e58bd-903b-4b3d-8be4-5678aab6c721-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 682.374305] env[63345]: DEBUG oslo_concurrency.lockutils [req-aaff94cd-8d4b-465c-96ba-b5fcd20dfe71 req-ea0abebf-94e0-4a25-b0bc-aabd547e62c1 service nova] Lock "3e4e58bd-903b-4b3d-8be4-5678aab6c721-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 682.374305] env[63345]: DEBUG oslo_concurrency.lockutils [req-aaff94cd-8d4b-465c-96ba-b5fcd20dfe71 req-ea0abebf-94e0-4a25-b0bc-aabd547e62c1 service nova] Lock "3e4e58bd-903b-4b3d-8be4-5678aab6c721-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 682.374524] env[63345]: DEBUG nova.compute.manager [req-aaff94cd-8d4b-465c-96ba-b5fcd20dfe71 req-ea0abebf-94e0-4a25-b0bc-aabd547e62c1 service nova] [instance: 3e4e58bd-903b-4b3d-8be4-5678aab6c721] No waiting events found dispatching network-vif-plugged-1e56115d-0d69-4b50-8607-b08677046c73 {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 682.374908] env[63345]: WARNING nova.compute.manager [req-aaff94cd-8d4b-465c-96ba-b5fcd20dfe71 req-ea0abebf-94e0-4a25-b0bc-aabd547e62c1 service nova] [instance: 3e4e58bd-903b-4b3d-8be4-5678aab6c721] Received unexpected event network-vif-plugged-1e56115d-0d69-4b50-8607-b08677046c73 for instance with vm_state building and task_state spawning. [ 682.374908] env[63345]: DEBUG nova.compute.manager [req-aaff94cd-8d4b-465c-96ba-b5fcd20dfe71 req-ea0abebf-94e0-4a25-b0bc-aabd547e62c1 service nova] [instance: 3e4e58bd-903b-4b3d-8be4-5678aab6c721] Received event network-changed-1e56115d-0d69-4b50-8607-b08677046c73 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 682.374908] env[63345]: DEBUG nova.compute.manager [req-aaff94cd-8d4b-465c-96ba-b5fcd20dfe71 req-ea0abebf-94e0-4a25-b0bc-aabd547e62c1 service nova] [instance: 3e4e58bd-903b-4b3d-8be4-5678aab6c721] Refreshing instance network info cache due to event network-changed-1e56115d-0d69-4b50-8607-b08677046c73. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 682.375134] env[63345]: DEBUG oslo_concurrency.lockutils [req-aaff94cd-8d4b-465c-96ba-b5fcd20dfe71 req-ea0abebf-94e0-4a25-b0bc-aabd547e62c1 service nova] Acquiring lock "refresh_cache-3e4e58bd-903b-4b3d-8be4-5678aab6c721" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 682.572705] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01cecada-9d80-4919-9ee3-661695cbfb68 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.576353] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Releasing lock "refresh_cache-3e4e58bd-903b-4b3d-8be4-5678aab6c721" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 682.577612] env[63345]: DEBUG nova.compute.manager [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 3e4e58bd-903b-4b3d-8be4-5678aab6c721] Instance network_info: |[{"id": "1e56115d-0d69-4b50-8607-b08677046c73", "address": "fa:16:3e:98:00:76", "network": {"id": "1888e7dd-bfd6-49d6-afb2-6ba1b22314cc", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-70760398-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a1f3a565957a4316af1b8fa14f81e75a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0df968ae-c1ef-4009-a0f4-6f2e799c2fda", "external-id": "nsx-vlan-transportzone-864", "segmentation_id": 864, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e56115d-0d", "ovs_interfaceid": "1e56115d-0d69-4b50-8607-b08677046c73", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 682.577612] env[63345]: DEBUG oslo_concurrency.lockutils [req-aaff94cd-8d4b-465c-96ba-b5fcd20dfe71 req-ea0abebf-94e0-4a25-b0bc-aabd547e62c1 service nova] Acquired lock "refresh_cache-3e4e58bd-903b-4b3d-8be4-5678aab6c721" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 682.577789] env[63345]: DEBUG nova.network.neutron [req-aaff94cd-8d4b-465c-96ba-b5fcd20dfe71 req-ea0abebf-94e0-4a25-b0bc-aabd547e62c1 service nova] [instance: 3e4e58bd-903b-4b3d-8be4-5678aab6c721] Refreshing network info cache for port 1e56115d-0d69-4b50-8607-b08677046c73 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 682.579228] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 3e4e58bd-903b-4b3d-8be4-5678aab6c721] 
Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:98:00:76', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0df968ae-c1ef-4009-a0f4-6f2e799c2fda', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1e56115d-0d69-4b50-8607-b08677046c73', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 682.586522] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Creating folder: Project (a1f3a565957a4316af1b8fa14f81e75a). Parent ref: group-v225918. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 682.589668] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e51c03dc-6981-4fde-bada-df501527a299 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.592600] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d14c51e0-fcca-40ca-844d-14bcf6329458 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.629935] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb374780-9c7c-4a4c-92e2-c9262e0af908 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.632059] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Created folder: Project (a1f3a565957a4316af1b8fa14f81e75a) in parent group-v225918. [ 682.632268] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Creating folder: Instances. Parent ref: group-v225964. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 682.632610] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a90d7379-3f3b-4596-a317-d34e8543fcba {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.640964] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32f6c9e8-265f-4fdc-a146-728e41ff350c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.646319] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Created folder: Instances in parent group-v225964. [ 682.646567] env[63345]: DEBUG oslo.service.loopingcall [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 682.647163] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3e4e58bd-903b-4b3d-8be4-5678aab6c721] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 682.647393] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ab652d10-f144-4c90-b985-1bf3615c050b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.671653] env[63345]: DEBUG nova.compute.provider_tree [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 682.677928] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 682.677928] env[63345]: value = "task-1016769" [ 682.677928] env[63345]: _type = "Task" [ 682.677928] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.686812] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016769, 'name': CreateVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.776791] env[63345]: DEBUG oslo_vmware.api [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Task: {'id': task-1016766, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.462092} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 682.777076] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 805f9143-a8d8-4995-a20d-3b10ef3ab599/805f9143-a8d8-4995-a20d-3b10ef3ab599.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 682.777296] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 682.777551] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3e0fa2a4-69c3-41c4-8fac-fa1b05f7d25a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.784259] env[63345]: DEBUG oslo_vmware.api [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Waiting for the task: (returnval){ [ 682.784259] env[63345]: value = "task-1016770" [ 682.784259] env[63345]: _type = "Task" [ 682.784259] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.793027] env[63345]: DEBUG oslo_vmware.api [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Task: {'id': task-1016770, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.131287] env[63345]: DEBUG nova.compute.manager [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: e6bc8cb9-2f1a-49cb-974d-ea9a211126ee] Start spawning the instance on the hypervisor. {{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 683.154669] env[63345]: DEBUG nova.virt.hardware [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 683.154922] env[63345]: DEBUG nova.virt.hardware [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 683.155105] env[63345]: DEBUG nova.virt.hardware [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 683.155377] env[63345]: DEBUG nova.virt.hardware [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 683.155454] env[63345]: DEBUG nova.virt.hardware [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 683.155580] env[63345]: DEBUG nova.virt.hardware [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 683.155783] env[63345]: DEBUG nova.virt.hardware [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 683.155939] env[63345]: DEBUG nova.virt.hardware [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 683.156131] env[63345]: DEBUG nova.virt.hardware [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 683.156298] env[63345]: DEBUG nova.virt.hardware [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 683.156467] env[63345]: DEBUG nova.virt.hardware [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 683.157340] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84e56b46-c30f-45d2-87a4-55c7733d272d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.165588] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-208fe5d5-c473-4f07-8648-07d20ee72e8b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.179172] env[63345]: DEBUG nova.scheduler.client.report [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 683.192152] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016769, 'name': CreateVM_Task, 'duration_secs': 0.325275} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.192977] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3e4e58bd-903b-4b3d-8be4-5678aab6c721] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 683.193335] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 683.193516] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 683.193855] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 683.194075] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-329997c4-8366-4b5b-a07c-7d7fa3850ba0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.198484] env[63345]: DEBUG oslo_vmware.api [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Waiting for the task: (returnval){ [ 683.198484] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52be6cb9-fdaf-af5e-490a-1ab1f011561f" [ 683.198484] env[63345]: _type = "Task" [ 683.198484] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.207860] env[63345]: DEBUG oslo_vmware.api [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52be6cb9-fdaf-af5e-490a-1ab1f011561f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.293972] env[63345]: DEBUG oslo_vmware.api [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Task: {'id': task-1016770, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062518} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.294310] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 683.294987] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40c13bac-94e8-459f-abe1-b1e33bfe02ae {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.319178] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] Reconfiguring VM instance instance-00000022 to attach disk [datastore2] 805f9143-a8d8-4995-a20d-3b10ef3ab599/805f9143-a8d8-4995-a20d-3b10ef3ab599.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 683.321940] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d7c1b831-3a1b-445c-b383-91efa62ba814 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.341356] env[63345]: DEBUG oslo_vmware.api [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Waiting for the task: (returnval){ [ 683.341356] env[63345]: value = "task-1016771" [ 683.341356] env[63345]: _type = "Task" [ 683.341356] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.349157] env[63345]: DEBUG oslo_vmware.api [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Task: {'id': task-1016771, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.417480] env[63345]: DEBUG nova.network.neutron [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: e6bc8cb9-2f1a-49cb-974d-ea9a211126ee] Successfully updated port: 829240e3-b053-450a-90f2-13fc659f12ca {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 683.438275] env[63345]: DEBUG nova.network.neutron [req-aaff94cd-8d4b-465c-96ba-b5fcd20dfe71 req-ea0abebf-94e0-4a25-b0bc-aabd547e62c1 service nova] [instance: 3e4e58bd-903b-4b3d-8be4-5678aab6c721] Updated VIF entry in instance network info cache for port 1e56115d-0d69-4b50-8607-b08677046c73. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 683.438634] env[63345]: DEBUG nova.network.neutron [req-aaff94cd-8d4b-465c-96ba-b5fcd20dfe71 req-ea0abebf-94e0-4a25-b0bc-aabd547e62c1 service nova] [instance: 3e4e58bd-903b-4b3d-8be4-5678aab6c721] Updating instance_info_cache with network_info: [{"id": "1e56115d-0d69-4b50-8607-b08677046c73", "address": "fa:16:3e:98:00:76", "network": {"id": "1888e7dd-bfd6-49d6-afb2-6ba1b22314cc", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-70760398-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a1f3a565957a4316af1b8fa14f81e75a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0df968ae-c1ef-4009-a0f4-6f2e799c2fda", "external-id": "nsx-vlan-transportzone-864", "segmentation_id": 864, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e56115d-0d", "ovs_interfaceid": "1e56115d-0d69-4b50-8607-b08677046c73", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 683.685078] env[63345]: DEBUG oslo_concurrency.lockutils [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.600s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 683.685488] env[63345]: DEBUG nova.compute.manager [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] [instance: 7bef089c-e93b-4ba6-a683-4e076489f92a] Start building networks asynchronously for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 683.688225] env[63345]: DEBUG oslo_concurrency.lockutils [None req-524ef0bf-1ce4-44cc-b9ef-88a44732ad39 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.067s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 683.688420] env[63345]: DEBUG oslo_concurrency.lockutils [None req-524ef0bf-1ce4-44cc-b9ef-88a44732ad39 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 683.690488] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.090s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 683.692368] env[63345]: INFO nova.compute.claims [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 0d5cb238-2d25-47b1-8ce6-15a20836dbfb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 683.708855] env[63345]: DEBUG oslo_vmware.api [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52be6cb9-fdaf-af5e-490a-1ab1f011561f, 'name': SearchDatastore_Task, 'duration_secs': 0.009455} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.709096] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 683.709343] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 3e4e58bd-903b-4b3d-8be4-5678aab6c721] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 683.709574] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 683.709949] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 683.709949] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 683.710183] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f8b44c6b-9378-4dd6-9bc7-bbc6c4f3a7d7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.715888] env[63345]: INFO nova.scheduler.client.report [None req-524ef0bf-1ce4-44cc-b9ef-88a44732ad39 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Deleted allocations for instance ee31689b-bf0b-4737-86c7-5451c763e603 [ 683.720486] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 683.720715] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 683.721431] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-323c9ad0-03de-4dd2-ae8a-1148298b8c27 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.726740] env[63345]: DEBUG oslo_vmware.api [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Waiting for the task: (returnval){ [ 683.726740] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]5299a749-a4bc-2f57-9b0e-d4df8d816c60" [ 683.726740] env[63345]: _type = "Task" [ 683.726740] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.734768] env[63345]: DEBUG oslo_vmware.api [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5299a749-a4bc-2f57-9b0e-d4df8d816c60, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.850830] env[63345]: DEBUG oslo_vmware.api [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Task: {'id': task-1016771, 'name': ReconfigVM_Task, 'duration_secs': 0.290767} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.851061] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] Reconfigured VM instance instance-00000022 to attach disk [datastore2] 805f9143-a8d8-4995-a20d-3b10ef3ab599/805f9143-a8d8-4995-a20d-3b10ef3ab599.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 683.851666] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-42a0f3f3-c9a2-4d84-95d9-d7c431c31122 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.858687] env[63345]: DEBUG oslo_vmware.api [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Waiting for the task: (returnval){ [ 683.858687] env[63345]: value = "task-1016772" [ 683.858687] env[63345]: _type = "Task" [ 683.858687] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.865963] env[63345]: DEBUG oslo_vmware.api [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Task: {'id': task-1016772, 'name': Rename_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.923224] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Acquiring lock "refresh_cache-e6bc8cb9-2f1a-49cb-974d-ea9a211126ee" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 683.923442] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Acquired lock "refresh_cache-e6bc8cb9-2f1a-49cb-974d-ea9a211126ee" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 683.923719] env[63345]: DEBUG nova.network.neutron [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: e6bc8cb9-2f1a-49cb-974d-ea9a211126ee] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 683.942367] env[63345]: DEBUG oslo_concurrency.lockutils [req-aaff94cd-8d4b-465c-96ba-b5fcd20dfe71 req-ea0abebf-94e0-4a25-b0bc-aabd547e62c1 service nova] Releasing lock "refresh_cache-3e4e58bd-903b-4b3d-8be4-5678aab6c721" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 684.195859] env[63345]: DEBUG nova.compute.utils [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 684.198995] env[63345]: DEBUG nova.compute.manager [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] [instance: 7bef089c-e93b-4ba6-a683-4e076489f92a] Allocating IP information in the background. {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 684.199240] env[63345]: DEBUG nova.network.neutron [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] [instance: 7bef089c-e93b-4ba6-a683-4e076489f92a] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 684.225043] env[63345]: DEBUG oslo_concurrency.lockutils [None req-524ef0bf-1ce4-44cc-b9ef-88a44732ad39 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Lock "ee31689b-bf0b-4737-86c7-5451c763e603" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.217s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 684.237367] env[63345]: DEBUG oslo_vmware.api [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5299a749-a4bc-2f57-9b0e-d4df8d816c60, 'name': SearchDatastore_Task, 'duration_secs': 0.009086} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.238195] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d46f89c7-1ba3-428e-8ad7-e17e54d901d7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.245863] env[63345]: DEBUG oslo_vmware.api [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Waiting for the task: (returnval){ [ 684.245863] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52acd886-f8fa-1585-a19b-4c7814f67b8b" [ 684.245863] env[63345]: _type = "Task" [ 684.245863] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.251650] env[63345]: DEBUG nova.policy [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1d375a2853f64db7afbc2e4b59bb5703', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f1a8f4cca8304eaead620b510eba103f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 684.258117] env[63345]: DEBUG oslo_vmware.api [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52acd886-f8fa-1585-a19b-4c7814f67b8b, 'name': SearchDatastore_Task, 'duration_secs': 0.009825} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.258338] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 684.258587] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 3e4e58bd-903b-4b3d-8be4-5678aab6c721/3e4e58bd-903b-4b3d-8be4-5678aab6c721.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 684.259182] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c73202b2-a178-4c5c-b4fe-5e953ecdc3d5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.265881] env[63345]: DEBUG oslo_vmware.api [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Waiting for the task: (returnval){ [ 684.265881] env[63345]: value = "task-1016773" [ 684.265881] env[63345]: _type = "Task" [ 684.265881] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.273604] env[63345]: DEBUG oslo_vmware.api [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Task: {'id': task-1016773, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.370836] env[63345]: DEBUG oslo_vmware.api [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Task: {'id': task-1016772, 'name': Rename_Task, 'duration_secs': 0.147127} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.371527] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 684.371527] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-55191cbd-3b47-4522-b2c3-5d2e421e8a3c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.379213] env[63345]: DEBUG oslo_vmware.api [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Waiting for the task: (returnval){ [ 684.379213] env[63345]: value = "task-1016774" [ 684.379213] env[63345]: _type = "Task" [ 684.379213] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.387631] env[63345]: DEBUG oslo_vmware.api [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Task: {'id': task-1016774, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.474020] env[63345]: DEBUG nova.network.neutron [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: e6bc8cb9-2f1a-49cb-974d-ea9a211126ee] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 684.494685] env[63345]: DEBUG nova.compute.manager [req-44284ec3-6eea-45b2-b6ee-f9e46365e61a req-f671c7fa-5ccd-462d-9c4a-477d3cda946d service nova] [instance: e6bc8cb9-2f1a-49cb-974d-ea9a211126ee] Received event network-vif-plugged-829240e3-b053-450a-90f2-13fc659f12ca {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 684.495174] env[63345]: DEBUG oslo_concurrency.lockutils [req-44284ec3-6eea-45b2-b6ee-f9e46365e61a req-f671c7fa-5ccd-462d-9c4a-477d3cda946d service nova] Acquiring lock "e6bc8cb9-2f1a-49cb-974d-ea9a211126ee-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 684.495501] env[63345]: DEBUG oslo_concurrency.lockutils [req-44284ec3-6eea-45b2-b6ee-f9e46365e61a req-f671c7fa-5ccd-462d-9c4a-477d3cda946d service nova] Lock "e6bc8cb9-2f1a-49cb-974d-ea9a211126ee-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 684.495770] env[63345]: DEBUG oslo_concurrency.lockutils [req-44284ec3-6eea-45b2-b6ee-f9e46365e61a req-f671c7fa-5ccd-462d-9c4a-477d3cda946d service nova] Lock "e6bc8cb9-2f1a-49cb-974d-ea9a211126ee-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 684.495991] env[63345]: DEBUG nova.compute.manager [req-44284ec3-6eea-45b2-b6ee-f9e46365e61a req-f671c7fa-5ccd-462d-9c4a-477d3cda946d service nova] [instance: e6bc8cb9-2f1a-49cb-974d-ea9a211126ee] No waiting events found dispatching network-vif-plugged-829240e3-b053-450a-90f2-13fc659f12ca {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 684.496234] env[63345]: WARNING nova.compute.manager [req-44284ec3-6eea-45b2-b6ee-f9e46365e61a req-f671c7fa-5ccd-462d-9c4a-477d3cda946d service nova] [instance: e6bc8cb9-2f1a-49cb-974d-ea9a211126ee] Received unexpected event network-vif-plugged-829240e3-b053-450a-90f2-13fc659f12ca for instance with vm_state building and task_state spawning. [ 684.496452] env[63345]: DEBUG nova.compute.manager [req-44284ec3-6eea-45b2-b6ee-f9e46365e61a req-f671c7fa-5ccd-462d-9c4a-477d3cda946d service nova] [instance: e6bc8cb9-2f1a-49cb-974d-ea9a211126ee] Received event network-changed-829240e3-b053-450a-90f2-13fc659f12ca {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 684.496659] env[63345]: DEBUG nova.compute.manager [req-44284ec3-6eea-45b2-b6ee-f9e46365e61a req-f671c7fa-5ccd-462d-9c4a-477d3cda946d service nova] [instance: e6bc8cb9-2f1a-49cb-974d-ea9a211126ee] Refreshing instance network info cache due to event network-changed-829240e3-b053-450a-90f2-13fc659f12ca. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 684.496963] env[63345]: DEBUG oslo_concurrency.lockutils [req-44284ec3-6eea-45b2-b6ee-f9e46365e61a req-f671c7fa-5ccd-462d-9c4a-477d3cda946d service nova] Acquiring lock "refresh_cache-e6bc8cb9-2f1a-49cb-974d-ea9a211126ee" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 684.650907] env[63345]: DEBUG nova.network.neutron [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: e6bc8cb9-2f1a-49cb-974d-ea9a211126ee] Updating instance_info_cache with network_info: [{"id": "829240e3-b053-450a-90f2-13fc659f12ca", "address": "fa:16:3e:c8:5d:2a", "network": {"id": "1888e7dd-bfd6-49d6-afb2-6ba1b22314cc", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-70760398-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a1f3a565957a4316af1b8fa14f81e75a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0df968ae-c1ef-4009-a0f4-6f2e799c2fda", "external-id": "nsx-vlan-transportzone-864", "segmentation_id": 864, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap829240e3-b0", "ovs_interfaceid": "829240e3-b053-450a-90f2-13fc659f12ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 684.704265] env[63345]: DEBUG nova.compute.manager [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] [instance: 7bef089c-e93b-4ba6-a683-4e076489f92a] Start building block device mappings for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 684.786586] env[63345]: DEBUG oslo_vmware.api [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Task: {'id': task-1016773, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.505021} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.786906] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 3e4e58bd-903b-4b3d-8be4-5678aab6c721/3e4e58bd-903b-4b3d-8be4-5678aab6c721.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 684.787169] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 3e4e58bd-903b-4b3d-8be4-5678aab6c721] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 684.787469] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9da3bbc2-d819-4af3-8c66-b3cc92cb9d66 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.797151] env[63345]: DEBUG oslo_vmware.api [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Waiting for the task: (returnval){ [ 684.797151] env[63345]: value = "task-1016775" [ 684.797151] env[63345]: _type = "Task" [ 684.797151] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.806359] env[63345]: DEBUG oslo_vmware.api [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Task: {'id': task-1016775, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.892123] env[63345]: DEBUG oslo_vmware.api [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Task: {'id': task-1016774, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.910530] env[63345]: DEBUG nova.network.neutron [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] [instance: 7bef089c-e93b-4ba6-a683-4e076489f92a] Successfully created port: 49047c62-1eed-4563-b10d-31b82cc302ff {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 685.159267] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Releasing lock "refresh_cache-e6bc8cb9-2f1a-49cb-974d-ea9a211126ee" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 685.159267] env[63345]: DEBUG nova.compute.manager [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: e6bc8cb9-2f1a-49cb-974d-ea9a211126ee] Instance network_info: |[{"id": "829240e3-b053-450a-90f2-13fc659f12ca", "address": "fa:16:3e:c8:5d:2a", "network": {"id": "1888e7dd-bfd6-49d6-afb2-6ba1b22314cc", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-70760398-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a1f3a565957a4316af1b8fa14f81e75a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0df968ae-c1ef-4009-a0f4-6f2e799c2fda", "external-id": "nsx-vlan-transportzone-864", "segmentation_id": 864, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap829240e3-b0", "ovs_interfaceid": "829240e3-b053-450a-90f2-13fc659f12ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 685.161213] env[63345]: DEBUG oslo_concurrency.lockutils [req-44284ec3-6eea-45b2-b6ee-f9e46365e61a req-f671c7fa-5ccd-462d-9c4a-477d3cda946d service nova] Acquired lock "refresh_cache-e6bc8cb9-2f1a-49cb-974d-ea9a211126ee" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 685.161472] env[63345]: DEBUG nova.network.neutron [req-44284ec3-6eea-45b2-b6ee-f9e46365e61a req-f671c7fa-5ccd-462d-9c4a-477d3cda946d service nova] [instance: e6bc8cb9-2f1a-49cb-974d-ea9a211126ee] Refreshing network info cache for port 829240e3-b053-450a-90f2-13fc659f12ca {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 685.162605] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: e6bc8cb9-2f1a-49cb-974d-ea9a211126ee] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c8:5d:2a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0df968ae-c1ef-4009-a0f4-6f2e799c2fda', 
'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '829240e3-b053-450a-90f2-13fc659f12ca', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 685.176111] env[63345]: DEBUG oslo.service.loopingcall [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 685.183688] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e6bc8cb9-2f1a-49cb-974d-ea9a211126ee] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 685.184023] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-673398ce-debf-4910-8f46-8f2e4402083b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.209227] env[63345]: INFO nova.virt.block_device [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] [instance: 7bef089c-e93b-4ba6-a683-4e076489f92a] Booting with volume a6bb0405-c6c3-4109-b8e1-29b372ad3058 at /dev/sda [ 685.215774] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 685.215774] env[63345]: value = "task-1016776" [ 685.215774] env[63345]: _type = "Task" [ 685.215774] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.236180] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016776, 'name': CreateVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.278610] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b95e462a-a4bc-49c0-b6d3-f233716a3469 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.292815] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35cd3b1e-3710-4bc5-9bce-aee713a00b7c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.319026] env[63345]: DEBUG oslo_vmware.api [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Task: {'id': task-1016775, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068965} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.319575] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 3e4e58bd-903b-4b3d-8be4-5678aab6c721] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 685.320388] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81acfb4f-1276-458b-ac56-5ac12ee5caae {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.335940] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-389ade2e-8693-4c62-905a-0fe30ec3fc7b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.338735] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a441d8e3-2e3f-4c4f-aa3b-4d61e0928ab3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.359424] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 3e4e58bd-903b-4b3d-8be4-5678aab6c721] Reconfiguring VM instance instance-00000023 to attach disk [datastore2] 3e4e58bd-903b-4b3d-8be4-5678aab6c721/3e4e58bd-903b-4b3d-8be4-5678aab6c721.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 685.361287] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9b248314-d796-46c4-9af9-42d3b21e5a6e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.383247] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69a730c8-7587-4ebd-ac7f-9d30a94656de {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.396681] env[63345]: DEBUG oslo_vmware.api [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Waiting for the task: (returnval){ [ 685.396681] env[63345]: value = "task-1016777" [ 685.396681] env[63345]: _type = "Task" [ 685.396681] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.400815] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4ed46db-91d4-47ca-93e7-1c7b9232e0f0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.459881] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e50d003-1bd5-425c-8265-2eab8f35a94c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.463795] env[63345]: DEBUG oslo_vmware.api [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Task: {'id': task-1016774, 'name': PowerOnVM_Task, 'duration_secs': 0.745411} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.468358] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 685.468670] env[63345]: INFO nova.compute.manager [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] Took 7.46 seconds to spawn the instance on the hypervisor. [ 685.468939] env[63345]: DEBUG nova.compute.manager [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 685.474015] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f1d539d-5a4e-4893-b055-985e134ec247 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.478866] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6bcf666-4cdf-40f4-bc1c-68ef2cb264b5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.482008] env[63345]: DEBUG oslo_vmware.api [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Task: {'id': task-1016777, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.486681] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67bf9520-2e1a-42ca-bf12-b2a8d08c80ab {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.499232] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff5b191a-822e-4e97-a130-fc682821ee3e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.512257] env[63345]: DEBUG nova.virt.block_device [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] [instance: 7bef089c-e93b-4ba6-a683-4e076489f92a] Updating existing volume attachment record: 1a63cf65-81c4-4905-bcfb-a42d5269d7e8 {{(pid=63345) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 685.516604] env[63345]: DEBUG nova.compute.provider_tree [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 685.573000] env[63345]: DEBUG nova.network.neutron [req-44284ec3-6eea-45b2-b6ee-f9e46365e61a req-f671c7fa-5ccd-462d-9c4a-477d3cda946d service nova] [instance: e6bc8cb9-2f1a-49cb-974d-ea9a211126ee] Updated VIF entry in instance network info cache for port 829240e3-b053-450a-90f2-13fc659f12ca. {{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 685.573000] env[63345]: DEBUG nova.network.neutron [req-44284ec3-6eea-45b2-b6ee-f9e46365e61a req-f671c7fa-5ccd-462d-9c4a-477d3cda946d service nova] [instance: e6bc8cb9-2f1a-49cb-974d-ea9a211126ee] Updating instance_info_cache with network_info: [{"id": "829240e3-b053-450a-90f2-13fc659f12ca", "address": "fa:16:3e:c8:5d:2a", "network": {"id": "1888e7dd-bfd6-49d6-afb2-6ba1b22314cc", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-70760398-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a1f3a565957a4316af1b8fa14f81e75a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0df968ae-c1ef-4009-a0f4-6f2e799c2fda", "external-id": "nsx-vlan-transportzone-864", "segmentation_id": 864, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap829240e3-b0", "ovs_interfaceid": "829240e3-b053-450a-90f2-13fc659f12ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 685.724197] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016776, 'name': CreateVM_Task, 'duration_secs': 0.38326} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.724529] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e6bc8cb9-2f1a-49cb-974d-ea9a211126ee] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 685.725061] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 685.725236] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 685.725551] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 685.725802] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4c02788d-5ebf-4cfa-9a28-c0b3ed097227 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.730795] env[63345]: DEBUG oslo_vmware.api [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Waiting for the task: (returnval){ [ 685.730795] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]529520cf-ae2b-734b-fbbf-36d653651564" [ 685.730795] env[63345]: _type = "Task" [ 685.730795] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.739265] env[63345]: DEBUG oslo_vmware.api [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]529520cf-ae2b-734b-fbbf-36d653651564, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.914454] env[63345]: DEBUG oslo_vmware.api [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Task: {'id': task-1016777, 'name': ReconfigVM_Task, 'duration_secs': 0.362144} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.914760] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 3e4e58bd-903b-4b3d-8be4-5678aab6c721] Reconfigured VM instance instance-00000023 to attach disk [datastore2] 3e4e58bd-903b-4b3d-8be4-5678aab6c721/3e4e58bd-903b-4b3d-8be4-5678aab6c721.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 685.915390] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7c2253cb-fd27-482f-9eea-e8408a0eaf99 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.921539] env[63345]: DEBUG oslo_vmware.api [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Waiting for the task: (returnval){ [ 685.921539] env[63345]: value = "task-1016778" [ 685.921539] env[63345]: _type = "Task" [ 685.921539] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.932415] env[63345]: DEBUG oslo_vmware.api [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Task: {'id': task-1016778, 'name': Rename_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.009697] env[63345]: INFO nova.compute.manager [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] Took 42.14 seconds to build instance. [ 686.019701] env[63345]: DEBUG nova.scheduler.client.report [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 686.075284] env[63345]: DEBUG oslo_concurrency.lockutils [req-44284ec3-6eea-45b2-b6ee-f9e46365e61a req-f671c7fa-5ccd-462d-9c4a-477d3cda946d service nova] Releasing lock "refresh_cache-e6bc8cb9-2f1a-49cb-974d-ea9a211126ee" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 686.243029] env[63345]: DEBUG oslo_vmware.api [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]529520cf-ae2b-734b-fbbf-36d653651564, 'name': SearchDatastore_Task, 'duration_secs': 0.026388} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.243029] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 686.243029] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: e6bc8cb9-2f1a-49cb-974d-ea9a211126ee] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 686.243029] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 686.243250] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 686.243250] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 686.243346] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1814485d-4c1b-48f9-9e76-633887b43335 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.253292] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 686.253478] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 686.254206] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7af4656d-2a7e-4e63-961b-ad2106db1bb7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.259388] env[63345]: DEBUG oslo_vmware.api [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Waiting for the task: (returnval){ [ 686.259388] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]527bbd24-d0f4-17bc-5984-0170f2ea20c8" [ 686.259388] env[63345]: _type = "Task" [ 686.259388] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.266838] env[63345]: DEBUG oslo_vmware.api [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]527bbd24-d0f4-17bc-5984-0170f2ea20c8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.431991] env[63345]: DEBUG oslo_vmware.api [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Task: {'id': task-1016778, 'name': Rename_Task, 'duration_secs': 0.150404} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.432539] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 3e4e58bd-903b-4b3d-8be4-5678aab6c721] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 686.432906] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-25a980c5-824b-400f-8d79-9528d8a0bc7d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.441689] env[63345]: DEBUG oslo_vmware.api [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Waiting for the task: (returnval){ [ 686.441689] env[63345]: value = "task-1016779" [ 686.441689] env[63345]: _type = "Task" [ 686.441689] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.450363] env[63345]: DEBUG oslo_vmware.api [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Task: {'id': task-1016779, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.513093] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f59e2467-50a3-4df0-9c7e-652f9d43437c tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Lock "805f9143-a8d8-4995-a20d-3b10ef3ab599" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 119.228s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 686.525552] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.835s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 686.526395] env[63345]: DEBUG nova.compute.manager [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 0d5cb238-2d25-47b1-8ce6-15a20836dbfb] Start building networks asynchronously for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 686.530219] env[63345]: DEBUG oslo_concurrency.lockutils [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.992s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 686.532523] env[63345]: INFO nova.compute.claims [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] [instance: c07c7f5d-a674-458f-8253-1bc2d61be6c1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 686.724070] env[63345]: DEBUG nova.compute.manager [req-f7f112e5-87bd-4783-a2d2-6502d071e49d req-3cdb1ebe-d889-4b15-9ba2-0cef24aca683 service nova] [instance: 7bef089c-e93b-4ba6-a683-4e076489f92a] Received event network-vif-plugged-49047c62-1eed-4563-b10d-31b82cc302ff {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 686.724311] env[63345]: DEBUG oslo_concurrency.lockutils [req-f7f112e5-87bd-4783-a2d2-6502d071e49d req-3cdb1ebe-d889-4b15-9ba2-0cef24aca683 service nova] Acquiring lock "7bef089c-e93b-4ba6-a683-4e076489f92a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 686.724566] env[63345]: DEBUG oslo_concurrency.lockutils [req-f7f112e5-87bd-4783-a2d2-6502d071e49d req-3cdb1ebe-d889-4b15-9ba2-0cef24aca683 service nova] Lock "7bef089c-e93b-4ba6-a683-4e076489f92a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 686.724668] env[63345]: DEBUG oslo_concurrency.lockutils [req-f7f112e5-87bd-4783-a2d2-6502d071e49d req-3cdb1ebe-d889-4b15-9ba2-0cef24aca683 service nova] Lock 
"7bef089c-e93b-4ba6-a683-4e076489f92a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 686.724833] env[63345]: DEBUG nova.compute.manager [req-f7f112e5-87bd-4783-a2d2-6502d071e49d req-3cdb1ebe-d889-4b15-9ba2-0cef24aca683 service nova] [instance: 7bef089c-e93b-4ba6-a683-4e076489f92a] No waiting events found dispatching network-vif-plugged-49047c62-1eed-4563-b10d-31b82cc302ff {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 686.725009] env[63345]: WARNING nova.compute.manager [req-f7f112e5-87bd-4783-a2d2-6502d071e49d req-3cdb1ebe-d889-4b15-9ba2-0cef24aca683 service nova] [instance: 7bef089c-e93b-4ba6-a683-4e076489f92a] Received unexpected event network-vif-plugged-49047c62-1eed-4563-b10d-31b82cc302ff for instance with vm_state building and task_state block_device_mapping. [ 686.773587] env[63345]: DEBUG oslo_vmware.api [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]527bbd24-d0f4-17bc-5984-0170f2ea20c8, 'name': SearchDatastore_Task, 'duration_secs': 0.029484} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.773587] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7d17fa5d-4b37-4fad-8349-c2b5e04cd427 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.774678] env[63345]: DEBUG nova.network.neutron [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] [instance: 7bef089c-e93b-4ba6-a683-4e076489f92a] Successfully updated port: 49047c62-1eed-4563-b10d-31b82cc302ff {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 686.781165] env[63345]: DEBUG oslo_vmware.api [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Waiting for the task: (returnval){ [ 686.781165] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52a7b6ab-c132-2c93-aa34-99e7c4360c9a" [ 686.781165] env[63345]: _type = "Task" [ 686.781165] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.798105] env[63345]: DEBUG oslo_vmware.api [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52a7b6ab-c132-2c93-aa34-99e7c4360c9a, 'name': SearchDatastore_Task, 'duration_secs': 0.01027} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.798761] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 686.799255] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] e6bc8cb9-2f1a-49cb-974d-ea9a211126ee/e6bc8cb9-2f1a-49cb-974d-ea9a211126ee.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 686.799611] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6703a0e0-f95f-444e-a758-067ff98caf8d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.811921] env[63345]: DEBUG oslo_vmware.api [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Waiting for the task: (returnval){ [ 686.811921] env[63345]: value = "task-1016780" [ 686.811921] env[63345]: _type = "Task" [ 686.811921] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.822654] env[63345]: DEBUG oslo_vmware.api [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Task: {'id': task-1016780, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.867433] env[63345]: DEBUG oslo_concurrency.lockutils [None req-06a9855c-2cb2-427e-89b8-67a8824663d4 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Acquiring lock "b4a7d6dd-98dc-49d8-b344-1878cd5a3f51" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 686.867433] env[63345]: DEBUG oslo_concurrency.lockutils [None req-06a9855c-2cb2-427e-89b8-67a8824663d4 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Lock "b4a7d6dd-98dc-49d8-b344-1878cd5a3f51" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 686.867433] env[63345]: DEBUG oslo_concurrency.lockutils [None req-06a9855c-2cb2-427e-89b8-67a8824663d4 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Acquiring lock "b4a7d6dd-98dc-49d8-b344-1878cd5a3f51-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 686.867800] env[63345]: DEBUG oslo_concurrency.lockutils [None req-06a9855c-2cb2-427e-89b8-67a8824663d4 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Lock "b4a7d6dd-98dc-49d8-b344-1878cd5a3f51-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 686.867800] env[63345]: DEBUG oslo_concurrency.lockutils [None req-06a9855c-2cb2-427e-89b8-67a8824663d4 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Lock "b4a7d6dd-98dc-49d8-b344-1878cd5a3f51-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 686.871023] env[63345]: INFO nova.compute.manager [None req-06a9855c-2cb2-427e-89b8-67a8824663d4 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: b4a7d6dd-98dc-49d8-b344-1878cd5a3f51] Terminating instance [ 686.949542] env[63345]: DEBUG oslo_vmware.api [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Task: {'id': task-1016779, 'name': PowerOnVM_Task, 'duration_secs': 0.477993} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.950096] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 3e4e58bd-903b-4b3d-8be4-5678aab6c721] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 686.950309] env[63345]: INFO nova.compute.manager [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 3e4e58bd-903b-4b3d-8be4-5678aab6c721] Took 6.39 seconds to spawn the instance on the hypervisor. [ 686.950489] env[63345]: DEBUG nova.compute.manager [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 3e4e58bd-903b-4b3d-8be4-5678aab6c721] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 686.951644] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db25d8b3-52c2-4166-9ba2-91bd3fec4417 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.015339] env[63345]: DEBUG nova.compute.manager [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 687.036804] env[63345]: DEBUG nova.compute.utils [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 687.038468] env[63345]: DEBUG nova.compute.manager [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 0d5cb238-2d25-47b1-8ce6-15a20836dbfb] Allocating IP information in the background. 
{{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 687.039494] env[63345]: DEBUG nova.network.neutron [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 0d5cb238-2d25-47b1-8ce6-15a20836dbfb] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 687.046819] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0fd28a91-2a8f-436d-8d59-05d4831a8f65 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Acquiring lock "28caa5f5-141a-4ef9-abb3-33a1973d99cf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 687.047071] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0fd28a91-2a8f-436d-8d59-05d4831a8f65 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Lock "28caa5f5-141a-4ef9-abb3-33a1973d99cf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 687.047275] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0fd28a91-2a8f-436d-8d59-05d4831a8f65 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Acquiring lock "28caa5f5-141a-4ef9-abb3-33a1973d99cf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 687.047453] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0fd28a91-2a8f-436d-8d59-05d4831a8f65 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Lock "28caa5f5-141a-4ef9-abb3-33a1973d99cf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 687.047618] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0fd28a91-2a8f-436d-8d59-05d4831a8f65 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Lock "28caa5f5-141a-4ef9-abb3-33a1973d99cf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 687.050055] env[63345]: INFO nova.compute.manager [None req-0fd28a91-2a8f-436d-8d59-05d4831a8f65 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: 28caa5f5-141a-4ef9-abb3-33a1973d99cf] Terminating instance [ 687.115497] env[63345]: DEBUG nova.policy [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '92bccb8029854f0ea2cef0747513b8bb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a1f3a565957a4316af1b8fa14f81e75a', 'project_domain_id': 
'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 687.279661] env[63345]: DEBUG oslo_concurrency.lockutils [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Acquiring lock "refresh_cache-7bef089c-e93b-4ba6-a683-4e076489f92a" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 687.279994] env[63345]: DEBUG oslo_concurrency.lockutils [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Acquired lock "refresh_cache-7bef089c-e93b-4ba6-a683-4e076489f92a" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 687.280282] env[63345]: DEBUG nova.network.neutron [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] [instance: 7bef089c-e93b-4ba6-a683-4e076489f92a] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 687.322728] env[63345]: DEBUG oslo_vmware.api [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Task: {'id': task-1016780, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.478527} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.323475] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] e6bc8cb9-2f1a-49cb-974d-ea9a211126ee/e6bc8cb9-2f1a-49cb-974d-ea9a211126ee.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 687.323475] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: e6bc8cb9-2f1a-49cb-974d-ea9a211126ee] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 687.323673] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-df6c7b39-706f-41ec-9164-f69c454d289b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.331380] env[63345]: DEBUG oslo_vmware.api [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Waiting for the task: (returnval){ [ 687.331380] env[63345]: value = "task-1016781" [ 687.331380] env[63345]: _type = "Task" [ 687.331380] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.341417] env[63345]: DEBUG oslo_vmware.api [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Task: {'id': task-1016781, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.374800] env[63345]: DEBUG nova.compute.manager [None req-06a9855c-2cb2-427e-89b8-67a8824663d4 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: b4a7d6dd-98dc-49d8-b344-1878cd5a3f51] Start destroying the instance on the hypervisor. {{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 687.375999] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-06a9855c-2cb2-427e-89b8-67a8824663d4 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: b4a7d6dd-98dc-49d8-b344-1878cd5a3f51] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 687.376522] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cae677f3-4a67-4106-89a2-a6347271917b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.385411] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-06a9855c-2cb2-427e-89b8-67a8824663d4 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: b4a7d6dd-98dc-49d8-b344-1878cd5a3f51] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 687.385654] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d0c46ed2-ee7a-4a3d-b049-3dfae8f69a24 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.392862] env[63345]: DEBUG oslo_vmware.api [None req-06a9855c-2cb2-427e-89b8-67a8824663d4 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Waiting for the task: (returnval){ [ 687.392862] env[63345]: value = "task-1016782" [ 687.392862] env[63345]: _type = "Task" [ 687.392862] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.401481] env[63345]: DEBUG oslo_vmware.api [None req-06a9855c-2cb2-427e-89b8-67a8824663d4 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Task: {'id': task-1016782, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.461170] env[63345]: DEBUG nova.network.neutron [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 0d5cb238-2d25-47b1-8ce6-15a20836dbfb] Successfully created port: a5abe431-00eb-4c22-81e4-d160cc76d360 {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 687.471295] env[63345]: INFO nova.compute.manager [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 3e4e58bd-903b-4b3d-8be4-5678aab6c721] Took 41.50 seconds to build instance. [ 687.542466] env[63345]: DEBUG nova.compute.manager [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 0d5cb238-2d25-47b1-8ce6-15a20836dbfb] Start building block device mappings for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 687.550691] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 687.554028] env[63345]: DEBUG nova.compute.manager [None req-0fd28a91-2a8f-436d-8d59-05d4831a8f65 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: 28caa5f5-141a-4ef9-abb3-33a1973d99cf] Start destroying the instance on the hypervisor. 
{{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 687.554286] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-0fd28a91-2a8f-436d-8d59-05d4831a8f65 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: 28caa5f5-141a-4ef9-abb3-33a1973d99cf] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 687.555089] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b4db486-93f7-4ab1-a80a-8a4a5e438a69 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.567565] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-0fd28a91-2a8f-436d-8d59-05d4831a8f65 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: 28caa5f5-141a-4ef9-abb3-33a1973d99cf] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 687.567875] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f5ffbc59-f304-4557-a392-9aff490b3940 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.577517] env[63345]: DEBUG oslo_vmware.api [None req-0fd28a91-2a8f-436d-8d59-05d4831a8f65 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Waiting for the task: (returnval){ [ 687.577517] env[63345]: value = "task-1016783" [ 687.577517] env[63345]: _type = "Task" [ 687.577517] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.592826] env[63345]: DEBUG oslo_vmware.api [None req-0fd28a91-2a8f-436d-8d59-05d4831a8f65 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Task: {'id': task-1016783, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.637131] env[63345]: DEBUG nova.compute.manager [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] [instance: 7bef089c-e93b-4ba6-a683-4e076489f92a] Start spawning the instance on the hypervisor. 
{{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 687.638276] env[63345]: DEBUG nova.virt.hardware [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 687.638276] env[63345]: DEBUG nova.virt.hardware [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 687.638412] env[63345]: DEBUG nova.virt.hardware [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 687.638664] env[63345]: DEBUG nova.virt.hardware [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 687.638848] env[63345]: DEBUG nova.virt.hardware [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 687.639011] env[63345]: DEBUG nova.virt.hardware [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 687.643017] env[63345]: DEBUG nova.virt.hardware [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 687.643017] env[63345]: DEBUG nova.virt.hardware [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 687.643017] env[63345]: DEBUG nova.virt.hardware [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] 
Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 687.643017] env[63345]: DEBUG nova.virt.hardware [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 687.643017] env[63345]: DEBUG nova.virt.hardware [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 687.643256] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98b7d232-329c-4d60-84be-7fc0d7a0d0b2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.650705] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f18a014-ddbb-411b-8c1e-396678d4d80e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.831307] env[63345]: DEBUG nova.network.neutron [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] [instance: 7bef089c-e93b-4ba6-a683-4e076489f92a] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 687.843685] env[63345]: DEBUG oslo_vmware.api [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Task: {'id': task-1016781, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06827} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.843961] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: e6bc8cb9-2f1a-49cb-974d-ea9a211126ee] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 687.844773] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc9c34f8-34d5-45ac-8c65-bb865a28d1f8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.874227] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: e6bc8cb9-2f1a-49cb-974d-ea9a211126ee] Reconfiguring VM instance instance-00000024 to attach disk [datastore2] e6bc8cb9-2f1a-49cb-974d-ea9a211126ee/e6bc8cb9-2f1a-49cb-974d-ea9a211126ee.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 687.879242] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-76f2fed5-cbf8-4c8f-83ab-dc53d5726684 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.905256] env[63345]: DEBUG oslo_vmware.api [None req-06a9855c-2cb2-427e-89b8-67a8824663d4 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Task: {'id': task-1016782, 'name': PowerOffVM_Task, 'duration_secs': 0.423862} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.909153] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-06a9855c-2cb2-427e-89b8-67a8824663d4 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: b4a7d6dd-98dc-49d8-b344-1878cd5a3f51] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 687.909935] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-06a9855c-2cb2-427e-89b8-67a8824663d4 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: b4a7d6dd-98dc-49d8-b344-1878cd5a3f51] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 687.909935] env[63345]: DEBUG oslo_vmware.api [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Waiting for the task: (returnval){ [ 687.909935] env[63345]: value = "task-1016784" [ 687.909935] env[63345]: _type = "Task" [ 687.909935] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.911089] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2c4848c1-8c83-409a-bc1d-cbd0d2d69590 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.923257] env[63345]: DEBUG oslo_vmware.api [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Task: {'id': task-1016784, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.973016] env[63345]: DEBUG nova.compute.manager [req-9d70488a-9a8a-4b4d-a129-12046c0f0ef3 req-129e6ba9-8341-492c-a71b-65c8a553a189 service nova] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] Received event network-changed-0ae421d2-83f3-4520-8a37-01cb6a91a3f5 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 687.973221] env[63345]: DEBUG nova.compute.manager [req-9d70488a-9a8a-4b4d-a129-12046c0f0ef3 req-129e6ba9-8341-492c-a71b-65c8a553a189 service nova] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] Refreshing instance network info cache due to event network-changed-0ae421d2-83f3-4520-8a37-01cb6a91a3f5. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 687.973427] env[63345]: DEBUG oslo_concurrency.lockutils [req-9d70488a-9a8a-4b4d-a129-12046c0f0ef3 req-129e6ba9-8341-492c-a71b-65c8a553a189 service nova] Acquiring lock "refresh_cache-805f9143-a8d8-4995-a20d-3b10ef3ab599" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 687.973564] env[63345]: DEBUG oslo_concurrency.lockutils [req-9d70488a-9a8a-4b4d-a129-12046c0f0ef3 req-129e6ba9-8341-492c-a71b-65c8a553a189 service nova] Acquired lock "refresh_cache-805f9143-a8d8-4995-a20d-3b10ef3ab599" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 687.973715] env[63345]: DEBUG nova.network.neutron [req-9d70488a-9a8a-4b4d-a129-12046c0f0ef3 req-129e6ba9-8341-492c-a71b-65c8a553a189 service nova] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] Refreshing network info cache for port 0ae421d2-83f3-4520-8a37-01cb6a91a3f5 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 687.975110] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d256bcf8-fbe6-405a-b52c-977f656dc124 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Lock "3e4e58bd-903b-4b3d-8be4-5678aab6c721" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 117.671s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 688.008518] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-06a9855c-2cb2-427e-89b8-67a8824663d4 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: b4a7d6dd-98dc-49d8-b344-1878cd5a3f51] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 688.008518] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-06a9855c-2cb2-427e-89b8-67a8824663d4 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] 
[instance: b4a7d6dd-98dc-49d8-b344-1878cd5a3f51] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 688.008518] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-06a9855c-2cb2-427e-89b8-67a8824663d4 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Deleting the datastore file [datastore2] b4a7d6dd-98dc-49d8-b344-1878cd5a3f51 {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 688.008766] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3bc194b8-f5e9-4f7f-9f5b-41bc239f2292 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.015285] env[63345]: DEBUG oslo_vmware.api [None req-06a9855c-2cb2-427e-89b8-67a8824663d4 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Waiting for the task: (returnval){ [ 688.015285] env[63345]: value = "task-1016786" [ 688.015285] env[63345]: _type = "Task" [ 688.015285] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.030532] env[63345]: DEBUG oslo_vmware.api [None req-06a9855c-2cb2-427e-89b8-67a8824663d4 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Task: {'id': task-1016786, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.031856] env[63345]: DEBUG nova.network.neutron [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] [instance: 7bef089c-e93b-4ba6-a683-4e076489f92a] Updating instance_info_cache with network_info: [{"id": "49047c62-1eed-4563-b10d-31b82cc302ff", "address": "fa:16:3e:e8:a3:50", "network": {"id": "256a54f6-a458-4574-98b2-7ae39111a4e1", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-542895641-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f1a8f4cca8304eaead620b510eba103f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43798f54-0c07-4417-a23f-58bb6b7e204b", "external-id": "nsx-vlan-transportzone-571", "segmentation_id": 571, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap49047c62-1e", "ovs_interfaceid": "49047c62-1eed-4563-b10d-31b82cc302ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 688.091251] env[63345]: DEBUG oslo_vmware.api [None req-0fd28a91-2a8f-436d-8d59-05d4831a8f65 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Task: {'id': task-1016783, 'name': PowerOffVM_Task, 'duration_secs': 0.254843} completed 
successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.092253] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-0fd28a91-2a8f-436d-8d59-05d4831a8f65 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: 28caa5f5-141a-4ef9-abb3-33a1973d99cf] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 688.092436] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-0fd28a91-2a8f-436d-8d59-05d4831a8f65 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: 28caa5f5-141a-4ef9-abb3-33a1973d99cf] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 688.096130] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-96a19eec-a415-4035-824d-81581f0c4114 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.160804] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efee888e-ea63-4f51-94e1-5b4de8fda3ca {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.169108] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ecd0656-4fe8-48ef-a92e-c470c804b2f9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.174246] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-0fd28a91-2a8f-436d-8d59-05d4831a8f65 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: 28caa5f5-141a-4ef9-abb3-33a1973d99cf] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 688.174529] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-0fd28a91-2a8f-436d-8d59-05d4831a8f65 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: 28caa5f5-141a-4ef9-abb3-33a1973d99cf] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 688.174822] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-0fd28a91-2a8f-436d-8d59-05d4831a8f65 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Deleting the datastore file [datastore2] 28caa5f5-141a-4ef9-abb3-33a1973d99cf {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 688.175523] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c11d24be-7512-4a5f-8370-b70486901423 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.204659] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55965fc5-d1e1-4331-87e7-f713f8de2d40 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.207303] env[63345]: DEBUG oslo_vmware.api [None req-0fd28a91-2a8f-436d-8d59-05d4831a8f65 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Waiting for the 
task: (returnval){ [ 688.207303] env[63345]: value = "task-1016788" [ 688.207303] env[63345]: _type = "Task" [ 688.207303] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.213897] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f59632ab-aa6b-42f9-835d-8bd5e035a625 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.220398] env[63345]: DEBUG oslo_vmware.api [None req-0fd28a91-2a8f-436d-8d59-05d4831a8f65 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Task: {'id': task-1016788, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.230937] env[63345]: DEBUG nova.compute.provider_tree [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 688.422222] env[63345]: DEBUG oslo_vmware.api [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Task: {'id': task-1016784, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.479691] env[63345]: DEBUG nova.compute.manager [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 34e0234c-36c4-4878-979b-46f045bd1785] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 688.527034] env[63345]: DEBUG oslo_vmware.api [None req-06a9855c-2cb2-427e-89b8-67a8824663d4 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Task: {'id': task-1016786, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.534997] env[63345]: DEBUG oslo_concurrency.lockutils [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Releasing lock "refresh_cache-7bef089c-e93b-4ba6-a683-4e076489f92a" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 688.535328] env[63345]: DEBUG nova.compute.manager [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] [instance: 7bef089c-e93b-4ba6-a683-4e076489f92a] Instance network_info: |[{"id": "49047c62-1eed-4563-b10d-31b82cc302ff", "address": "fa:16:3e:e8:a3:50", "network": {"id": "256a54f6-a458-4574-98b2-7ae39111a4e1", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-542895641-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f1a8f4cca8304eaead620b510eba103f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43798f54-0c07-4417-a23f-58bb6b7e204b", "external-id": "nsx-vlan-transportzone-571", "segmentation_id": 571, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap49047c62-1e", "ovs_interfaceid": "49047c62-1eed-4563-b10d-31b82cc302ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 688.535780] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] [instance: 7bef089c-e93b-4ba6-a683-4e076489f92a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e8:a3:50', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '43798f54-0c07-4417-a23f-58bb6b7e204b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '49047c62-1eed-4563-b10d-31b82cc302ff', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 688.543935] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Creating folder: Project (f1a8f4cca8304eaead620b510eba103f). Parent ref: group-v225918. 
{{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 688.544569] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-46be3dd4-cbd2-4265-9f57-cda42a37e4f8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.559068] env[63345]: DEBUG nova.compute.manager [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 0d5cb238-2d25-47b1-8ce6-15a20836dbfb] Start spawning the instance on the hypervisor. {{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 688.563056] env[63345]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 688.563056] env[63345]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=63345) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 688.563809] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Folder already exists: Project (f1a8f4cca8304eaead620b510eba103f). Parent ref: group-v225918. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 688.563809] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Creating folder: Instances. Parent ref: group-v225927. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 688.566641] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-75257b94-76d4-4151-8a01-18edb4b0f175 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.576304] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Created folder: Instances in parent group-v225927. [ 688.577131] env[63345]: DEBUG oslo.service.loopingcall [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 688.577131] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7bef089c-e93b-4ba6-a683-4e076489f92a] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 688.577131] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4e21e8ac-5303-496a-bd7d-be4bdc900fd6 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.601087] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 688.601087] env[63345]: value = "task-1016791" [ 688.601087] env[63345]: _type = "Task" [ 688.601087] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.603448] env[63345]: DEBUG nova.virt.hardware [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 688.603681] env[63345]: DEBUG nova.virt.hardware [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 688.603838] env[63345]: DEBUG nova.virt.hardware [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 688.604026] env[63345]: DEBUG nova.virt.hardware [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 688.604183] env[63345]: DEBUG nova.virt.hardware [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 688.604328] env[63345]: DEBUG nova.virt.hardware [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 688.604533] env[63345]: DEBUG nova.virt.hardware [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 688.604694] env[63345]: DEBUG nova.virt.hardware [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 688.604859] 
env[63345]: DEBUG nova.virt.hardware [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 688.605030] env[63345]: DEBUG nova.virt.hardware [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 688.605207] env[63345]: DEBUG nova.virt.hardware [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 688.607976] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25f9ff80-7c47-4838-8fd1-c5fe502ca19b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.618247] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016791, 'name': CreateVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.621141] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20622a1a-3ff3-44df-92c6-93432f03f770 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.717243] env[63345]: DEBUG oslo_vmware.api [None req-0fd28a91-2a8f-436d-8d59-05d4831a8f65 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Task: {'id': task-1016788, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.734233] env[63345]: DEBUG nova.network.neutron [req-9d70488a-9a8a-4b4d-a129-12046c0f0ef3 req-129e6ba9-8341-492c-a71b-65c8a553a189 service nova] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] Updated VIF entry in instance network info cache for port 0ae421d2-83f3-4520-8a37-01cb6a91a3f5. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 688.734645] env[63345]: DEBUG nova.network.neutron [req-9d70488a-9a8a-4b4d-a129-12046c0f0ef3 req-129e6ba9-8341-492c-a71b-65c8a553a189 service nova] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] Updating instance_info_cache with network_info: [{"id": "0ae421d2-83f3-4520-8a37-01cb6a91a3f5", "address": "fa:16:3e:3e:4a:12", "network": {"id": "a7e35920-e04b-4676-9e3a-a322585417c0", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1167528582-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.148", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87516cd599534b94801951669a97a9e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d33839ae-40ca-471b-92e3-eb282b920682", "external-id": "nsx-vlan-transportzone-416", "segmentation_id": 416, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0ae421d2-83", "ovs_interfaceid": "0ae421d2-83f3-4520-8a37-01cb6a91a3f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 688.757459] env[63345]: ERROR nova.scheduler.client.report [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] [req-99517182-9baa-4039-a9a0-29a11c154780] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID fc35ddde-c15e-4ab8-bf77-a06ae0805b57. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-99517182-9baa-4039-a9a0-29a11c154780"}]} [ 688.779661] env[63345]: DEBUG nova.scheduler.client.report [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Refreshing inventories for resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 688.793461] env[63345]: DEBUG nova.compute.manager [req-97db5aaa-62ce-4633-a324-9ff4a8641bdf req-7636b71e-f924-4c0e-8164-00b9142c7d32 service nova] [instance: 7bef089c-e93b-4ba6-a683-4e076489f92a] Received event network-changed-49047c62-1eed-4563-b10d-31b82cc302ff {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 688.793652] env[63345]: DEBUG nova.compute.manager [req-97db5aaa-62ce-4633-a324-9ff4a8641bdf req-7636b71e-f924-4c0e-8164-00b9142c7d32 service nova] [instance: 7bef089c-e93b-4ba6-a683-4e076489f92a] Refreshing instance network info cache due to event network-changed-49047c62-1eed-4563-b10d-31b82cc302ff. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 688.793868] env[63345]: DEBUG oslo_concurrency.lockutils [req-97db5aaa-62ce-4633-a324-9ff4a8641bdf req-7636b71e-f924-4c0e-8164-00b9142c7d32 service nova] Acquiring lock "refresh_cache-7bef089c-e93b-4ba6-a683-4e076489f92a" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 688.794123] env[63345]: DEBUG oslo_concurrency.lockutils [req-97db5aaa-62ce-4633-a324-9ff4a8641bdf req-7636b71e-f924-4c0e-8164-00b9142c7d32 service nova] Acquired lock "refresh_cache-7bef089c-e93b-4ba6-a683-4e076489f92a" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 688.794330] env[63345]: DEBUG nova.network.neutron [req-97db5aaa-62ce-4633-a324-9ff4a8641bdf req-7636b71e-f924-4c0e-8164-00b9142c7d32 service nova] [instance: 7bef089c-e93b-4ba6-a683-4e076489f92a] Refreshing network info cache for port 49047c62-1eed-4563-b10d-31b82cc302ff {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 688.796096] env[63345]: DEBUG nova.scheduler.client.report [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Updating ProviderTree inventory for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 688.796296] env[63345]: DEBUG nova.compute.provider_tree [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': 
{'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 688.808163] env[63345]: DEBUG nova.scheduler.client.report [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Refreshing aggregate associations for resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57, aggregates: None {{(pid=63345) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 688.826477] env[63345]: DEBUG nova.scheduler.client.report [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Refreshing trait associations for resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=63345) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 688.923447] env[63345]: DEBUG oslo_vmware.api [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Task: {'id': task-1016784, 'name': ReconfigVM_Task, 'duration_secs': 0.750137} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.923447] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: e6bc8cb9-2f1a-49cb-974d-ea9a211126ee] Reconfigured VM instance instance-00000024 to attach disk [datastore2] e6bc8cb9-2f1a-49cb-974d-ea9a211126ee/e6bc8cb9-2f1a-49cb-974d-ea9a211126ee.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 688.926795] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-797e3998-6ee6-44cf-8326-2fc4d6a48ab1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.934264] env[63345]: DEBUG oslo_vmware.api [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Waiting for the task: (returnval){ [ 688.934264] env[63345]: value = "task-1016792" [ 688.934264] env[63345]: _type = "Task" [ 688.934264] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.945179] env[63345]: DEBUG oslo_vmware.api [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Task: {'id': task-1016792, 'name': Rename_Task} progress is 5%. 
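Note: the ERROR above shows Placement rejecting an inventory update with HTTP 409 ("placement.concurrent_update") because the resource-provider generation sent with the request was stale; the report client then refreshes inventories, aggregates and traits and tries again. Below is a minimal sketch of that refresh-and-retry pattern against the Placement REST API using plain `requests`; the base URL, token handling and retry count are illustrative assumptions, not values taken from this log, and this is not Nova's report-client code.

```python
# Hedged sketch: retry an inventory PUT when Placement reports a
# resource-provider generation conflict (HTTP 409, placement.concurrent_update).
# PLACEMENT_URL and the token are placeholders, not values from this log.
import requests

PLACEMENT_URL = "http://placement.example/placement"          # assumption
HEADERS = {"X-Auth-Token": "TOKEN",                           # assumption
           "OpenStack-API-Version": "placement 1.26"}

def put_inventories(rp_uuid, inventories, max_retries=3):
    for _ in range(max_retries):
        # Fetch the provider's current generation before each attempt.
        rp = requests.get(f"{PLACEMENT_URL}/resource_providers/{rp_uuid}",
                          headers=HEADERS).json()
        body = {"resource_provider_generation": rp["generation"],
                "inventories": inventories}
        resp = requests.put(
            f"{PLACEMENT_URL}/resource_providers/{rp_uuid}/inventories",
            json=body, headers=HEADERS)
        if resp.status_code != 409:
            resp.raise_for_status()
            return resp.json()
        # 409 means another writer bumped the generation in the meantime,
        # so refresh and retry -- the behaviour logged above.
    raise RuntimeError("inventory update kept conflicting")

# Shaped like the inventory in the log:
# put_inventories("fc35ddde-c15e-4ab8-bf77-a06ae0805b57",
#                 {"VCPU": {"total": 48, "max_unit": 16, "allocation_ratio": 4.0}})
```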
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.996332] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 689.026445] env[63345]: DEBUG oslo_vmware.api [None req-06a9855c-2cb2-427e-89b8-67a8824663d4 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Task: {'id': task-1016786, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.567551} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.029016] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-06a9855c-2cb2-427e-89b8-67a8824663d4 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 689.029211] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-06a9855c-2cb2-427e-89b8-67a8824663d4 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: b4a7d6dd-98dc-49d8-b344-1878cd5a3f51] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 689.029396] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-06a9855c-2cb2-427e-89b8-67a8824663d4 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: b4a7d6dd-98dc-49d8-b344-1878cd5a3f51] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 689.029626] env[63345]: INFO nova.compute.manager [None req-06a9855c-2cb2-427e-89b8-67a8824663d4 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: b4a7d6dd-98dc-49d8-b344-1878cd5a3f51] Took 1.65 seconds to destroy the instance on the hypervisor. [ 689.029812] env[63345]: DEBUG oslo.service.loopingcall [None req-06a9855c-2cb2-427e-89b8-67a8824663d4 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 689.030236] env[63345]: DEBUG nova.compute.manager [-] [instance: b4a7d6dd-98dc-49d8-b344-1878cd5a3f51] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 689.030336] env[63345]: DEBUG nova.network.neutron [-] [instance: b4a7d6dd-98dc-49d8-b344-1878cd5a3f51] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 689.120833] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016791, 'name': CreateVM_Task} progress is 99%. 
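Note: the recurring "Task: {'id': task-..., 'name': ...} progress is N%" lines come from a poller that keeps reading the vCenter task state until it reaches a terminal state. The sketch below is a generic poll-until-done loop in the same spirit; it is not oslo.vmware's implementation, and `get_task_info` is an assumed stand-in for a session call returning the task's state and progress.

```python
# Hedged sketch of the poll loop behind the "progress is N%" lines above.
# get_task_info() is a hypothetical callable returning an object with
# .state, .progress and .error; it is not an oslo.vmware API.
import time

def wait_for_task(get_task_info, task_id, interval=0.5, timeout=300):
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info(task_id)
        if info.state == "success":
            return info
        if info.state == "error":
            raise RuntimeError(f"task {task_id} failed: {info.error}")
        # queued / running: report progress and poll again
        print(f"Task {task_id} progress is {info.progress}%")
        time.sleep(interval)
    raise TimeoutError(f"task {task_id} did not complete in {timeout}s")
```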
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.217518] env[63345]: DEBUG oslo_vmware.api [None req-0fd28a91-2a8f-436d-8d59-05d4831a8f65 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Task: {'id': task-1016788, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.599338} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.217601] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-0fd28a91-2a8f-436d-8d59-05d4831a8f65 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 689.217786] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-0fd28a91-2a8f-436d-8d59-05d4831a8f65 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: 28caa5f5-141a-4ef9-abb3-33a1973d99cf] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 689.217918] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-0fd28a91-2a8f-436d-8d59-05d4831a8f65 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: 28caa5f5-141a-4ef9-abb3-33a1973d99cf] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 689.218152] env[63345]: INFO nova.compute.manager [None req-0fd28a91-2a8f-436d-8d59-05d4831a8f65 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] [instance: 28caa5f5-141a-4ef9-abb3-33a1973d99cf] Took 1.66 seconds to destroy the instance on the hypervisor. [ 689.218417] env[63345]: DEBUG oslo.service.loopingcall [None req-0fd28a91-2a8f-436d-8d59-05d4831a8f65 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
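Note: the "Waiting for function ... _deallocate_network_with_retries to return" lines show oslo.service's looping-call mechanism driving a retried operation. A minimal usage sketch of that pattern follows; the polled function here is a toy placeholder, not Nova's deallocation code.

```python
# Hedged sketch of the oslo.service looping-call pattern: run a function at a
# fixed interval until it raises LoopingCallDone. The function body is a toy.
from oslo_service import loopingcall

attempts = {"n": 0}

def _deallocate_with_retries():
    attempts["n"] += 1
    if attempts["n"] >= 3:                       # pretend the third try succeeds
        raise loopingcall.LoopingCallDone(retvalue=True)

timer = loopingcall.FixedIntervalLoopingCall(_deallocate_with_retries)
result = timer.start(interval=0.5).wait()        # blocks until LoopingCallDone
print("deallocated:", result)
```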
{{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 689.218554] env[63345]: DEBUG nova.compute.manager [-] [instance: 28caa5f5-141a-4ef9-abb3-33a1973d99cf] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 689.218655] env[63345]: DEBUG nova.network.neutron [-] [instance: 28caa5f5-141a-4ef9-abb3-33a1973d99cf] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 689.239557] env[63345]: DEBUG oslo_concurrency.lockutils [req-9d70488a-9a8a-4b4d-a129-12046c0f0ef3 req-129e6ba9-8341-492c-a71b-65c8a553a189 service nova] Releasing lock "refresh_cache-805f9143-a8d8-4995-a20d-3b10ef3ab599" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 689.296236] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b7e1927-caf5-44be-9ec0-522638ef28f8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.307660] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d3975a7-54a9-4fea-b4ee-7dc6becd4b43 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.344788] env[63345]: DEBUG nova.network.neutron [-] [instance: b4a7d6dd-98dc-49d8-b344-1878cd5a3f51] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 689.349097] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f40e4654-12c7-4241-b94b-ddc2818168c2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.355302] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1ae8ecb-297c-4d1d-b96f-9c3f979afde6 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.370429] env[63345]: DEBUG nova.compute.provider_tree [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 689.449048] env[63345]: DEBUG oslo_vmware.api [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Task: {'id': task-1016792, 'name': Rename_Task, 'duration_secs': 0.346118} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.449335] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: e6bc8cb9-2f1a-49cb-974d-ea9a211126ee] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 689.449570] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-eeaa94bf-590a-41f1-b7b2-bbd49a2af00e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.457236] env[63345]: DEBUG oslo_vmware.api [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Waiting for the task: (returnval){ [ 689.457236] env[63345]: value = "task-1016793" [ 689.457236] env[63345]: _type = "Task" [ 689.457236] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.464702] env[63345]: DEBUG oslo_vmware.api [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Task: {'id': task-1016793, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.527600] env[63345]: DEBUG nova.network.neutron [req-97db5aaa-62ce-4633-a324-9ff4a8641bdf req-7636b71e-f924-4c0e-8164-00b9142c7d32 service nova] [instance: 7bef089c-e93b-4ba6-a683-4e076489f92a] Updated VIF entry in instance network info cache for port 49047c62-1eed-4563-b10d-31b82cc302ff. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 689.528366] env[63345]: DEBUG nova.network.neutron [req-97db5aaa-62ce-4633-a324-9ff4a8641bdf req-7636b71e-f924-4c0e-8164-00b9142c7d32 service nova] [instance: 7bef089c-e93b-4ba6-a683-4e076489f92a] Updating instance_info_cache with network_info: [{"id": "49047c62-1eed-4563-b10d-31b82cc302ff", "address": "fa:16:3e:e8:a3:50", "network": {"id": "256a54f6-a458-4574-98b2-7ae39111a4e1", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-542895641-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f1a8f4cca8304eaead620b510eba103f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43798f54-0c07-4417-a23f-58bb6b7e204b", "external-id": "nsx-vlan-transportzone-571", "segmentation_id": 571, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap49047c62-1e", "ovs_interfaceid": "49047c62-1eed-4563-b10d-31b82cc302ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 689.601090] env[63345]: DEBUG nova.network.neutron [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 0d5cb238-2d25-47b1-8ce6-15a20836dbfb] Successfully updated port: a5abe431-00eb-4c22-81e4-d160cc76d360 {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 689.625174] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016791, 'name': CreateVM_Task} progress is 99%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.849969] env[63345]: INFO nova.compute.manager [-] [instance: b4a7d6dd-98dc-49d8-b344-1878cd5a3f51] Took 0.82 seconds to deallocate network for instance. [ 689.873377] env[63345]: DEBUG nova.scheduler.client.report [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 689.966648] env[63345]: DEBUG oslo_vmware.api [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Task: {'id': task-1016793, 'name': PowerOnVM_Task} progress is 88%. 
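Note: the instance_info_cache entries above are JSON-serialisable VIF dictionaries (port id, MAC, network with subnets, fixed and floating IPs, OVS details). The helper below pulls the addresses out of such a structure; the sample data is trimmed from the cache entry for port 49047c62 in this log, and the helper itself is illustrative rather than Nova's network model code.

```python
# Hedged sketch: summarise a cached network_info entry of the shape logged
# above. Illustrative helper only, not nova.network.model.
vif = {
    "id": "49047c62-1eed-4563-b10d-31b82cc302ff",
    "address": "fa:16:3e:e8:a3:50",
    "devname": "tap49047c62-1e",
    "network": {
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{"address": "192.168.128.5", "type": "fixed",
                     "floating_ips": []}],
        }],
        "meta": {"mtu": 8950},
    },
}

def summarize_vif(vif):
    fixed, floating = [], []
    for subnet in vif["network"]["subnets"]:
        for ip in subnet["ips"]:
            fixed.append(ip["address"])
            floating.extend(f["address"] for f in ip.get("floating_ips", []))
    return {"mac": vif["address"], "device": vif["devname"],
            "mtu": vif["network"]["meta"]["mtu"],
            "fixed_ips": fixed, "floating_ips": floating}

print(summarize_vif(vif))
# {'mac': 'fa:16:3e:e8:a3:50', 'device': 'tap49047c62-1e', 'mtu': 8950,
#  'fixed_ips': ['192.168.128.5'], 'floating_ips': []}
```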
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.999483] env[63345]: DEBUG nova.compute.manager [req-5abbdec9-0d1b-4215-9213-e6d533b89531 req-b3264264-ed22-4890-b518-4d651ac5bc9c service nova] [instance: b4a7d6dd-98dc-49d8-b344-1878cd5a3f51] Received event network-vif-deleted-1634e3f5-396d-4cf4-a5e2-e985d04c1391 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 690.030310] env[63345]: DEBUG oslo_concurrency.lockutils [req-97db5aaa-62ce-4633-a324-9ff4a8641bdf req-7636b71e-f924-4c0e-8164-00b9142c7d32 service nova] Releasing lock "refresh_cache-7bef089c-e93b-4ba6-a683-4e076489f92a" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 690.104305] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Acquiring lock "refresh_cache-0d5cb238-2d25-47b1-8ce6-15a20836dbfb" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 690.104478] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Acquired lock "refresh_cache-0d5cb238-2d25-47b1-8ce6-15a20836dbfb" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 690.104537] env[63345]: DEBUG nova.network.neutron [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 0d5cb238-2d25-47b1-8ce6-15a20836dbfb] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 690.123037] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016791, 'name': CreateVM_Task, 'duration_secs': 1.438565} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.123037] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7bef089c-e93b-4ba6-a683-4e076489f92a] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 690.123826] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] [instance: 7bef089c-e93b-4ba6-a683-4e076489f92a] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-225930', 'volume_id': 'a6bb0405-c6c3-4109-b8e1-29b372ad3058', 'name': 'volume-a6bb0405-c6c3-4109-b8e1-29b372ad3058', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '7bef089c-e93b-4ba6-a683-4e076489f92a', 'attached_at': '', 'detached_at': '', 'volume_id': 'a6bb0405-c6c3-4109-b8e1-29b372ad3058', 'serial': 'a6bb0405-c6c3-4109-b8e1-29b372ad3058'}, 'delete_on_termination': True, 'mount_device': '/dev/sda', 'device_type': None, 'boot_index': 0, 'disk_bus': None, 'guest_format': None, 'attachment_id': '1a63cf65-81c4-4905-bcfb-a42d5269d7e8', 'volume_type': None}], 'swap': None} {{(pid=63345) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 690.124239] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] [instance: 7bef089c-e93b-4ba6-a683-4e076489f92a] Root volume attach. Driver type: vmdk {{(pid=63345) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 690.125583] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c3648ff-9ed8-4b4a-b49a-3a6e13308ffa {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.135737] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32fdd24f-afdc-44d5-abef-c50b45e4735a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.142803] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8cd98c9-5855-493f-95fa-f14c854ae7e6 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.150036] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-23579221-6bf6-49f1-9ecb-41a1fbea6fb8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.156638] env[63345]: DEBUG oslo_vmware.api [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Waiting for the task: (returnval){ [ 690.156638] env[63345]: value = "task-1016794" [ 690.156638] env[63345]: _type = "Task" [ 690.156638] env[63345]: } to complete. 
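Note: the "Block device information present" entry above shows the structure handed to the driver for a boot-from-volume instance: a root_device_name plus a block_device_mapping list whose entries carry a vmdk connection_info. The helper below picks out the boot volume from such a structure; the field names mirror the logged entry, and the helper is illustrative, not driver code.

```python
# Hedged sketch: locate the boot disk in block device info shaped like the
# entry logged above (illustrative helper, not Nova driver code).
block_device_info = {
    "root_device_name": "/dev/sda",
    "swap": None,
    "block_device_mapping": [{
        "mount_device": "/dev/sda",
        "boot_index": 0,
        "delete_on_termination": True,
        "connection_info": {
            "driver_volume_type": "vmdk",
            "data": {"volume_id": "a6bb0405-c6c3-4109-b8e1-29b372ad3058",
                     "access_mode": "rw"},
        },
    }],
}

def boot_volume(block_device_info):
    root = block_device_info["root_device_name"]
    for bdm in block_device_info["block_device_mapping"]:
        if bdm["mount_device"] == root or bdm.get("boot_index") == 0:
            return bdm["connection_info"]["data"]["volume_id"]
    return None

assert boot_volume(block_device_info) == "a6bb0405-c6c3-4109-b8e1-29b372ad3058"
```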
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.166267] env[63345]: DEBUG oslo_vmware.api [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Task: {'id': task-1016794, 'name': RelocateVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.195483] env[63345]: DEBUG nova.network.neutron [-] [instance: 28caa5f5-141a-4ef9-abb3-33a1973d99cf] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 690.356857] env[63345]: DEBUG oslo_concurrency.lockutils [None req-06a9855c-2cb2-427e-89b8-67a8824663d4 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 690.378021] env[63345]: DEBUG oslo_concurrency.lockutils [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.848s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 690.378474] env[63345]: DEBUG nova.compute.manager [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] [instance: c07c7f5d-a674-458f-8253-1bc2d61be6c1] Start building networks asynchronously for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 690.381027] env[63345]: DEBUG oslo_concurrency.lockutils [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.274s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 690.382530] env[63345]: INFO nova.compute.claims [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] [instance: f043239f-7158-4199-a784-d711a5a301be] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 690.468975] env[63345]: DEBUG oslo_vmware.api [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Task: {'id': task-1016793, 'name': PowerOnVM_Task, 'duration_secs': 0.695138} completed successfully. 
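Note: the "Acquiring/Acquired/Releasing lock ... waited/held Ns" lines above come from oslo.concurrency's lockutils, which serialises access to shared state such as the resource tracker's "compute_resources" lock or the per-instance "refresh_cache-<uuid>" locks. A minimal usage sketch follows; the guarded function bodies are placeholders, not Nova's resource-tracker logic.

```python
# Hedged sketch of oslo.concurrency named locks like "compute_resources" and
# "refresh_cache-<uuid>" seen above; the function bodies are placeholders.
from oslo_concurrency import lockutils

@lockutils.synchronized("compute_resources")
def claim_resources(instance_uuid):
    # Only one claim/update runs at a time, which is why the log can show a
    # long "waited 25.274s" when another request held the lock first.
    print(f"claiming resources for {instance_uuid}")

def refresh_network_cache(instance_uuid):
    with lockutils.lock(f"refresh_cache-{instance_uuid}"):
        print(f"refreshing network info cache for {instance_uuid}")

claim_resources("f043239f-7158-4199-a784-d711a5a301be")
refresh_network_cache("0d5cb238-2d25-47b1-8ce6-15a20836dbfb")
```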
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.469293] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: e6bc8cb9-2f1a-49cb-974d-ea9a211126ee] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 690.469497] env[63345]: INFO nova.compute.manager [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: e6bc8cb9-2f1a-49cb-974d-ea9a211126ee] Took 7.34 seconds to spawn the instance on the hypervisor. [ 690.469673] env[63345]: DEBUG nova.compute.manager [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: e6bc8cb9-2f1a-49cb-974d-ea9a211126ee] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 690.470586] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-009d9a79-219b-4511-8553-1593445734ea {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.669370] env[63345]: DEBUG oslo_vmware.api [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Task: {'id': task-1016794, 'name': RelocateVM_Task} progress is 40%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.698202] env[63345]: INFO nova.compute.manager [-] [instance: 28caa5f5-141a-4ef9-abb3-33a1973d99cf] Took 1.48 seconds to deallocate network for instance. [ 690.844408] env[63345]: DEBUG nova.network.neutron [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 0d5cb238-2d25-47b1-8ce6-15a20836dbfb] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 690.887278] env[63345]: DEBUG nova.compute.utils [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 690.891921] env[63345]: DEBUG nova.compute.manager [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] [instance: c07c7f5d-a674-458f-8253-1bc2d61be6c1] Allocating IP information in the background. 
{{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 690.892143] env[63345]: DEBUG nova.network.neutron [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] [instance: c07c7f5d-a674-458f-8253-1bc2d61be6c1] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 690.997699] env[63345]: INFO nova.compute.manager [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: e6bc8cb9-2f1a-49cb-974d-ea9a211126ee] Took 42.97 seconds to build instance. [ 691.017568] env[63345]: DEBUG nova.policy [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '458a68ad62384753912a2274bbeca535', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4316c7bc78ca419788b6bc605f492129', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 691.021936] env[63345]: DEBUG nova.compute.manager [req-5785a973-8cad-42a1-99cb-97e45747aa13 req-5a11c8a6-d771-4041-ad73-a5992bf7e288 service nova] [instance: 0d5cb238-2d25-47b1-8ce6-15a20836dbfb] Received event network-vif-plugged-a5abe431-00eb-4c22-81e4-d160cc76d360 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 691.022172] env[63345]: DEBUG oslo_concurrency.lockutils [req-5785a973-8cad-42a1-99cb-97e45747aa13 req-5a11c8a6-d771-4041-ad73-a5992bf7e288 service nova] Acquiring lock "0d5cb238-2d25-47b1-8ce6-15a20836dbfb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 691.022424] env[63345]: DEBUG oslo_concurrency.lockutils [req-5785a973-8cad-42a1-99cb-97e45747aa13 req-5a11c8a6-d771-4041-ad73-a5992bf7e288 service nova] Lock "0d5cb238-2d25-47b1-8ce6-15a20836dbfb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 691.022678] env[63345]: DEBUG oslo_concurrency.lockutils [req-5785a973-8cad-42a1-99cb-97e45747aa13 req-5a11c8a6-d771-4041-ad73-a5992bf7e288 service nova] Lock "0d5cb238-2d25-47b1-8ce6-15a20836dbfb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 691.022746] env[63345]: DEBUG nova.compute.manager [req-5785a973-8cad-42a1-99cb-97e45747aa13 req-5a11c8a6-d771-4041-ad73-a5992bf7e288 service nova] [instance: 0d5cb238-2d25-47b1-8ce6-15a20836dbfb] No waiting events found dispatching network-vif-plugged-a5abe431-00eb-4c22-81e4-d160cc76d360 {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 691.022980] env[63345]: WARNING nova.compute.manager [req-5785a973-8cad-42a1-99cb-97e45747aa13 
req-5a11c8a6-d771-4041-ad73-a5992bf7e288 service nova] [instance: 0d5cb238-2d25-47b1-8ce6-15a20836dbfb] Received unexpected event network-vif-plugged-a5abe431-00eb-4c22-81e4-d160cc76d360 for instance with vm_state building and task_state spawning. [ 691.023121] env[63345]: DEBUG nova.compute.manager [req-5785a973-8cad-42a1-99cb-97e45747aa13 req-5a11c8a6-d771-4041-ad73-a5992bf7e288 service nova] [instance: 0d5cb238-2d25-47b1-8ce6-15a20836dbfb] Received event network-changed-a5abe431-00eb-4c22-81e4-d160cc76d360 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 691.023306] env[63345]: DEBUG nova.compute.manager [req-5785a973-8cad-42a1-99cb-97e45747aa13 req-5a11c8a6-d771-4041-ad73-a5992bf7e288 service nova] [instance: 0d5cb238-2d25-47b1-8ce6-15a20836dbfb] Refreshing instance network info cache due to event network-changed-a5abe431-00eb-4c22-81e4-d160cc76d360. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 691.023426] env[63345]: DEBUG oslo_concurrency.lockutils [req-5785a973-8cad-42a1-99cb-97e45747aa13 req-5a11c8a6-d771-4041-ad73-a5992bf7e288 service nova] Acquiring lock "refresh_cache-0d5cb238-2d25-47b1-8ce6-15a20836dbfb" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 691.167993] env[63345]: DEBUG oslo_vmware.api [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Task: {'id': task-1016794, 'name': RelocateVM_Task} progress is 53%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.198800] env[63345]: DEBUG nova.network.neutron [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 0d5cb238-2d25-47b1-8ce6-15a20836dbfb] Updating instance_info_cache with network_info: [{"id": "a5abe431-00eb-4c22-81e4-d160cc76d360", "address": "fa:16:3e:ab:60:f7", "network": {"id": "1888e7dd-bfd6-49d6-afb2-6ba1b22314cc", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-70760398-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a1f3a565957a4316af1b8fa14f81e75a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0df968ae-c1ef-4009-a0f4-6f2e799c2fda", "external-id": "nsx-vlan-transportzone-864", "segmentation_id": 864, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa5abe431-00", "ovs_interfaceid": "a5abe431-00eb-4c22-81e4-d160cc76d360", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 691.205755] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0fd28a91-2a8f-436d-8d59-05d4831a8f65 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 691.392642] env[63345]: DEBUG nova.compute.manager [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] [instance: c07c7f5d-a674-458f-8253-1bc2d61be6c1] Start building block device mappings for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 691.508481] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ca3c2dee-5570-41af-87c7-cc4896ad3227 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Lock "e6bc8cb9-2f1a-49cb-974d-ea9a211126ee" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 120.653s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 691.675732] env[63345]: DEBUG oslo_vmware.api [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Task: {'id': task-1016794, 'name': RelocateVM_Task} progress is 67%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.701357] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Releasing lock "refresh_cache-0d5cb238-2d25-47b1-8ce6-15a20836dbfb" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 691.701596] env[63345]: DEBUG nova.compute.manager [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 0d5cb238-2d25-47b1-8ce6-15a20836dbfb] Instance network_info: |[{"id": "a5abe431-00eb-4c22-81e4-d160cc76d360", "address": "fa:16:3e:ab:60:f7", "network": {"id": "1888e7dd-bfd6-49d6-afb2-6ba1b22314cc", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-70760398-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a1f3a565957a4316af1b8fa14f81e75a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0df968ae-c1ef-4009-a0f4-6f2e799c2fda", "external-id": "nsx-vlan-transportzone-864", "segmentation_id": 864, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa5abe431-00", "ovs_interfaceid": "a5abe431-00eb-4c22-81e4-d160cc76d360", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 691.701910] env[63345]: DEBUG oslo_concurrency.lockutils [req-5785a973-8cad-42a1-99cb-97e45747aa13 req-5a11c8a6-d771-4041-ad73-a5992bf7e288 service nova] Acquired lock 
"refresh_cache-0d5cb238-2d25-47b1-8ce6-15a20836dbfb" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 691.702109] env[63345]: DEBUG nova.network.neutron [req-5785a973-8cad-42a1-99cb-97e45747aa13 req-5a11c8a6-d771-4041-ad73-a5992bf7e288 service nova] [instance: 0d5cb238-2d25-47b1-8ce6-15a20836dbfb] Refreshing network info cache for port a5abe431-00eb-4c22-81e4-d160cc76d360 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 691.703378] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 0d5cb238-2d25-47b1-8ce6-15a20836dbfb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ab:60:f7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0df968ae-c1ef-4009-a0f4-6f2e799c2fda', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a5abe431-00eb-4c22-81e4-d160cc76d360', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 691.712665] env[63345]: DEBUG oslo.service.loopingcall [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 691.716198] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0d5cb238-2d25-47b1-8ce6-15a20836dbfb] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 691.716715] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c2ccfa63-dcc6-4c0d-9525-ff2bce4dc738 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.741025] env[63345]: DEBUG nova.network.neutron [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] [instance: c07c7f5d-a674-458f-8253-1bc2d61be6c1] Successfully created port: 6950f2a7-3573-46f0-9c46-3301f7ebcf5f {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 691.748588] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 691.748588] env[63345]: value = "task-1016795" [ 691.748588] env[63345]: _type = "Task" [ 691.748588] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.757209] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016795, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.996675] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cceb68d8-a302-42ea-8ed5-32ae553ca166 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.005260] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-992da3a6-93df-4847-9a77-4d6724b05494 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.012246] env[63345]: DEBUG nova.compute.manager [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 692.051049] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bfda0e7-7df8-4fec-89f1-8139447fe537 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.061818] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19a2be13-8c83-4dee-bf3c-97a880055cd2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.078522] env[63345]: DEBUG nova.compute.provider_tree [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 692.173055] env[63345]: DEBUG oslo_vmware.api [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Task: {'id': task-1016794, 'name': RelocateVM_Task} progress is 81%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.260695] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016795, 'name': CreateVM_Task} progress is 25%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.408026] env[63345]: DEBUG nova.compute.manager [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] [instance: c07c7f5d-a674-458f-8253-1bc2d61be6c1] Start spawning the instance on the hypervisor. 
{{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 692.439615] env[63345]: DEBUG nova.virt.hardware [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 692.440026] env[63345]: DEBUG nova.virt.hardware [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 692.440268] env[63345]: DEBUG nova.virt.hardware [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 692.440497] env[63345]: DEBUG nova.virt.hardware [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 692.440688] env[63345]: DEBUG nova.virt.hardware [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 692.441040] env[63345]: DEBUG nova.virt.hardware [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 692.441394] env[63345]: DEBUG nova.virt.hardware [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 692.441605] env[63345]: DEBUG nova.virt.hardware [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 692.441851] env[63345]: DEBUG nova.virt.hardware [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 692.442097] env[63345]: DEBUG nova.virt.hardware [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 692.442342] env[63345]: DEBUG nova.virt.hardware [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 692.443844] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a878a1c-c6bf-4a71-a366-4389d8421250 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.455257] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7ea90f0-2061-488b-bf88-22d124c5fdfc {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.500732] env[63345]: DEBUG nova.network.neutron [req-5785a973-8cad-42a1-99cb-97e45747aa13 req-5a11c8a6-d771-4041-ad73-a5992bf7e288 service nova] [instance: 0d5cb238-2d25-47b1-8ce6-15a20836dbfb] Updated VIF entry in instance network info cache for port a5abe431-00eb-4c22-81e4-d160cc76d360. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 692.501107] env[63345]: DEBUG nova.network.neutron [req-5785a973-8cad-42a1-99cb-97e45747aa13 req-5a11c8a6-d771-4041-ad73-a5992bf7e288 service nova] [instance: 0d5cb238-2d25-47b1-8ce6-15a20836dbfb] Updating instance_info_cache with network_info: [{"id": "a5abe431-00eb-4c22-81e4-d160cc76d360", "address": "fa:16:3e:ab:60:f7", "network": {"id": "1888e7dd-bfd6-49d6-afb2-6ba1b22314cc", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-70760398-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a1f3a565957a4316af1b8fa14f81e75a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0df968ae-c1ef-4009-a0f4-6f2e799c2fda", "external-id": "nsx-vlan-transportzone-864", "segmentation_id": 864, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa5abe431-00", "ovs_interfaceid": "a5abe431-00eb-4c22-81e4-d160cc76d360", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 692.570856] env[63345]: DEBUG oslo_concurrency.lockutils [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 692.581529] env[63345]: DEBUG nova.scheduler.client.report [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 692.675485] env[63345]: DEBUG oslo_vmware.api [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Task: {'id': task-1016794, 'name': RelocateVM_Task} progress is 95%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.759721] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016795, 'name': CreateVM_Task} progress is 25%. 
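Note: the nova.virt.hardware lines earlier in this section ("Flavor limits 0:0:0", "Chose sockets=0, cores=0, threads=0; limits were ... 65536", "Got 1 possible topologies", "Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]") walk the CPU topology selection for a 1-vCPU flavor: with no flavor or image limits the maxima default to 65536, all factorisations of the vCPU count are enumerated, and the single possibility (1,1,1) is chosen. Below is a toy re-enumeration under those assumptions; it is a sketch, not nova.virt.hardware, and ignores preference weighting and ordering.

```python
# Hedged sketch of the topology enumeration logged above: list the
# sockets/cores/threads factorisations of a vCPU count within given limits.
from collections import namedtuple

VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    topos = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            for threads in range(1, min(vcpus, max_threads) + 1):
                if sockets * cores * threads == vcpus:
                    topos.append(VirtCPUTopology(sockets, cores, threads))
    return topos

print(possible_topologies(1))
# [VirtCPUTopology(sockets=1, cores=1, threads=1)]  -- matches the log
print(possible_topologies(4)[:3])
```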
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.004735] env[63345]: DEBUG oslo_concurrency.lockutils [req-5785a973-8cad-42a1-99cb-97e45747aa13 req-5a11c8a6-d771-4041-ad73-a5992bf7e288 service nova] Releasing lock "refresh_cache-0d5cb238-2d25-47b1-8ce6-15a20836dbfb" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 693.004879] env[63345]: DEBUG nova.compute.manager [req-5785a973-8cad-42a1-99cb-97e45747aa13 req-5a11c8a6-d771-4041-ad73-a5992bf7e288 service nova] [instance: 28caa5f5-141a-4ef9-abb3-33a1973d99cf] Received event network-vif-deleted-973be2f9-7f10-4bd3-996a-1ca7b92520c4 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 693.087038] env[63345]: DEBUG oslo_concurrency.lockutils [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.705s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 693.087356] env[63345]: DEBUG nova.compute.manager [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] [instance: f043239f-7158-4199-a784-d711a5a301be] Start building networks asynchronously for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 693.089935] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ef38e3d5-dbac-4904-a1a3-8bb19f5d01da tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.399s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 693.090187] env[63345]: DEBUG nova.objects.instance [None req-ef38e3d5-dbac-4904-a1a3-8bb19f5d01da tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Lazy-loading 'resources' on Instance uuid 04fd7aaa-658d-480d-8465-825f120477bc {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 693.173083] env[63345]: DEBUG oslo_vmware.api [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Task: {'id': task-1016794, 'name': RelocateVM_Task} progress is 97%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.262788] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016795, 'name': CreateVM_Task, 'duration_secs': 1.414115} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.263017] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0d5cb238-2d25-47b1-8ce6-15a20836dbfb] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 693.263706] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 693.263871] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 693.264213] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 693.264469] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0bf78f15-8cb4-4430-aa9d-f46ab9188080 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.270552] env[63345]: DEBUG oslo_vmware.api [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Waiting for the task: (returnval){ [ 693.270552] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]523969d3-f8bc-7687-4f7b-a9d66e9ea555" [ 693.270552] env[63345]: _type = "Task" [ 693.270552] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.278725] env[63345]: DEBUG oslo_vmware.api [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]523969d3-f8bc-7687-4f7b-a9d66e9ea555, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.363808] env[63345]: DEBUG nova.compute.manager [req-79eb6b87-c9e3-4f6d-8f1f-db63f19cd58c req-0b9d84c2-0b80-4673-b6f1-2745960047d8 service nova] [instance: c07c7f5d-a674-458f-8253-1bc2d61be6c1] Received event network-vif-plugged-6950f2a7-3573-46f0-9c46-3301f7ebcf5f {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 693.364053] env[63345]: DEBUG oslo_concurrency.lockutils [req-79eb6b87-c9e3-4f6d-8f1f-db63f19cd58c req-0b9d84c2-0b80-4673-b6f1-2745960047d8 service nova] Acquiring lock "c07c7f5d-a674-458f-8253-1bc2d61be6c1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 693.364272] env[63345]: DEBUG oslo_concurrency.lockutils [req-79eb6b87-c9e3-4f6d-8f1f-db63f19cd58c req-0b9d84c2-0b80-4673-b6f1-2745960047d8 service nova] Lock "c07c7f5d-a674-458f-8253-1bc2d61be6c1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 693.364443] env[63345]: DEBUG oslo_concurrency.lockutils [req-79eb6b87-c9e3-4f6d-8f1f-db63f19cd58c req-0b9d84c2-0b80-4673-b6f1-2745960047d8 service nova] Lock "c07c7f5d-a674-458f-8253-1bc2d61be6c1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 693.364616] env[63345]: DEBUG nova.compute.manager [req-79eb6b87-c9e3-4f6d-8f1f-db63f19cd58c req-0b9d84c2-0b80-4673-b6f1-2745960047d8 service nova] [instance: c07c7f5d-a674-458f-8253-1bc2d61be6c1] No waiting events found dispatching network-vif-plugged-6950f2a7-3573-46f0-9c46-3301f7ebcf5f {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 693.364799] env[63345]: WARNING nova.compute.manager [req-79eb6b87-c9e3-4f6d-8f1f-db63f19cd58c req-0b9d84c2-0b80-4673-b6f1-2745960047d8 service nova] [instance: c07c7f5d-a674-458f-8253-1bc2d61be6c1] Received unexpected event network-vif-plugged-6950f2a7-3573-46f0-9c46-3301f7ebcf5f for instance with vm_state building and task_state spawning. [ 693.475243] env[63345]: DEBUG nova.network.neutron [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] [instance: c07c7f5d-a674-458f-8253-1bc2d61be6c1] Successfully updated port: 6950f2a7-3573-46f0-9c46-3301f7ebcf5f {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 693.594069] env[63345]: DEBUG nova.compute.utils [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 693.598142] env[63345]: DEBUG nova.compute.manager [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] [instance: f043239f-7158-4199-a784-d711a5a301be] Allocating IP information in the background. 
{{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 693.598312] env[63345]: DEBUG nova.network.neutron [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] [instance: f043239f-7158-4199-a784-d711a5a301be] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 693.661342] env[63345]: DEBUG nova.policy [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4f61553bd9a642f7a91cfb2072177ba7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '705d13cb3bd04eebbfccb1353523d6a4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 693.675545] env[63345]: DEBUG oslo_vmware.api [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Task: {'id': task-1016794, 'name': RelocateVM_Task} progress is 97%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.781154] env[63345]: DEBUG oslo_vmware.api [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]523969d3-f8bc-7687-4f7b-a9d66e9ea555, 'name': SearchDatastore_Task, 'duration_secs': 0.010082} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.783735] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 693.783968] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 0d5cb238-2d25-47b1-8ce6-15a20836dbfb] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 693.784221] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 693.784370] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 693.784548] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 693.784971] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2e43bce3-eeeb-484b-b863-395385034727 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.793331] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 693.796938] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 693.796938] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e0e95ea-2cbc-4925-b260-87ad8667e84e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.803066] env[63345]: DEBUG oslo_vmware.api [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Waiting for the task: (returnval){ [ 693.803066] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52deb0a5-af9b-4abc-84b8-de332f8b81bc" [ 693.803066] env[63345]: _type = "Task" [ 693.803066] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.811691] env[63345]: DEBUG oslo_vmware.api [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52deb0a5-af9b-4abc-84b8-de332f8b81bc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.979450] env[63345]: DEBUG oslo_concurrency.lockutils [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Acquiring lock "refresh_cache-c07c7f5d-a674-458f-8253-1bc2d61be6c1" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 693.979599] env[63345]: DEBUG oslo_concurrency.lockutils [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Acquired lock "refresh_cache-c07c7f5d-a674-458f-8253-1bc2d61be6c1" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 693.979794] env[63345]: DEBUG nova.network.neutron [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] [instance: c07c7f5d-a674-458f-8253-1bc2d61be6c1] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 694.098911] env[63345]: DEBUG nova.compute.manager [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] [instance: f043239f-7158-4199-a784-d711a5a301be] Start building block device mappings for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 694.109278] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d11a3941-f2af-4869-a974-388b8cd6b459 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.120593] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c66b828d-7018-4cb6-a392-0a58f4014925 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.180360] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4548792f-c4ed-487f-b1eb-542b66d4cf5a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.194199] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c48d3f80-a7d5-483b-b833-493227a23f06 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.197131] env[63345]: DEBUG oslo_vmware.api [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Task: {'id': task-1016794, 'name': RelocateVM_Task} progress is 98%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.211471] env[63345]: DEBUG nova.compute.provider_tree [None req-ef38e3d5-dbac-4904-a1a3-8bb19f5d01da tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 694.223313] env[63345]: DEBUG nova.network.neutron [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] [instance: f043239f-7158-4199-a784-d711a5a301be] Successfully created port: a8153077-1984-4619-ae74-08c5902cfff8 {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 694.317720] env[63345]: DEBUG oslo_vmware.api [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52deb0a5-af9b-4abc-84b8-de332f8b81bc, 'name': SearchDatastore_Task, 'duration_secs': 0.009459} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.318326] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-056875ea-e023-46dd-96f5-8df8ed9650d7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.324607] env[63345]: DEBUG oslo_vmware.api [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Waiting for the task: (returnval){ [ 694.324607] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52cb278b-5a45-c33b-3d1d-4a6d9df57a62" [ 694.324607] env[63345]: _type = "Task" [ 694.324607] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.335018] env[63345]: DEBUG oslo_vmware.api [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52cb278b-5a45-c33b-3d1d-4a6d9df57a62, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.552154] env[63345]: DEBUG nova.network.neutron [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] [instance: c07c7f5d-a674-458f-8253-1bc2d61be6c1] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 694.685484] env[63345]: DEBUG oslo_vmware.api [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Task: {'id': task-1016794, 'name': RelocateVM_Task, 'duration_secs': 4.452238} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.685818] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] [instance: 7bef089c-e93b-4ba6-a683-4e076489f92a] Volume attach. Driver type: vmdk {{(pid=63345) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 694.687346] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] [instance: 7bef089c-e93b-4ba6-a683-4e076489f92a] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-225930', 'volume_id': 'a6bb0405-c6c3-4109-b8e1-29b372ad3058', 'name': 'volume-a6bb0405-c6c3-4109-b8e1-29b372ad3058', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '7bef089c-e93b-4ba6-a683-4e076489f92a', 'attached_at': '', 'detached_at': '', 'volume_id': 'a6bb0405-c6c3-4109-b8e1-29b372ad3058', 'serial': 'a6bb0405-c6c3-4109-b8e1-29b372ad3058'} {{(pid=63345) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 694.688193] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d400fe55-63f0-431d-9591-c11b9770c800 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.707916] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50426172-50cf-4d42-9a7d-cc58bd9da557 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.714142] env[63345]: DEBUG nova.scheduler.client.report [None req-ef38e3d5-dbac-4904-a1a3-8bb19f5d01da tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': 
{'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 694.741682] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] [instance: 7bef089c-e93b-4ba6-a683-4e076489f92a] Reconfiguring VM instance instance-00000025 to attach disk [datastore1] volume-a6bb0405-c6c3-4109-b8e1-29b372ad3058/volume-a6bb0405-c6c3-4109-b8e1-29b372ad3058.vmdk or device None with type thin {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 694.741682] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ef38e3d5-dbac-4904-a1a3-8bb19f5d01da tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.651s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 694.743228] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3c9800d6-0f4f-479d-bd98-0c8221ca31d6 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.757788] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.921s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 694.759342] env[63345]: INFO nova.compute.claims [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] [instance: 30755716-03a7-41bd-90c2-7ef21baf9975] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 694.768125] env[63345]: DEBUG oslo_vmware.api [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Waiting for the task: (returnval){ [ 694.768125] env[63345]: value = "task-1016796" [ 694.768125] env[63345]: _type = "Task" [ 694.768125] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.779705] env[63345]: DEBUG oslo_vmware.api [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Task: {'id': task-1016796, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.788575] env[63345]: INFO nova.scheduler.client.report [None req-ef38e3d5-dbac-4904-a1a3-8bb19f5d01da tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Deleted allocations for instance 04fd7aaa-658d-480d-8465-825f120477bc [ 694.835448] env[63345]: DEBUG oslo_vmware.api [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52cb278b-5a45-c33b-3d1d-4a6d9df57a62, 'name': SearchDatastore_Task, 'duration_secs': 0.017595} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.835887] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 694.836052] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 0d5cb238-2d25-47b1-8ce6-15a20836dbfb/0d5cb238-2d25-47b1-8ce6-15a20836dbfb.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 694.836337] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ccdcf4b2-ccb8-467b-a4bd-ab6b9385cfce {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.845210] env[63345]: DEBUG oslo_vmware.api [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Waiting for the task: (returnval){ [ 694.845210] env[63345]: value = "task-1016797" [ 694.845210] env[63345]: _type = "Task" [ 694.845210] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.854962] env[63345]: DEBUG oslo_vmware.api [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Task: {'id': task-1016797, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.858097] env[63345]: DEBUG nova.network.neutron [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] [instance: c07c7f5d-a674-458f-8253-1bc2d61be6c1] Updating instance_info_cache with network_info: [{"id": "6950f2a7-3573-46f0-9c46-3301f7ebcf5f", "address": "fa:16:3e:d7:c7:20", "network": {"id": "18285fd9-d154-415c-acbb-1494303e3b6c", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.154", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "5dc99cc64e6c4d83928b309253a8df8d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8b99a46-3e7f-4ef1-9e45-58e6cd17f210", "external-id": "nsx-vlan-transportzone-704", "segmentation_id": 704, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6950f2a7-35", "ovs_interfaceid": "6950f2a7-3573-46f0-9c46-3301f7ebcf5f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 695.118757] env[63345]: DEBUG nova.compute.manager [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] [instance: f043239f-7158-4199-a784-d711a5a301be] Start spawning the instance on the hypervisor. 
{{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 695.148059] env[63345]: DEBUG nova.virt.hardware [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 695.148059] env[63345]: DEBUG nova.virt.hardware [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 695.148350] env[63345]: DEBUG nova.virt.hardware [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 695.148350] env[63345]: DEBUG nova.virt.hardware [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 695.148520] env[63345]: DEBUG nova.virt.hardware [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 695.148675] env[63345]: DEBUG nova.virt.hardware [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 695.148958] env[63345]: DEBUG nova.virt.hardware [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 695.149214] env[63345]: DEBUG nova.virt.hardware [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 695.149374] env[63345]: DEBUG nova.virt.hardware [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 695.149544] env[63345]: DEBUG nova.virt.hardware [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 695.149721] env[63345]: DEBUG nova.virt.hardware [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 695.150721] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0778c741-531b-4114-8c21-acb0416b3f78 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.161211] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f8c050a-0d3b-42c1-9f29-706763a8b9b0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.279286] env[63345]: DEBUG oslo_vmware.api [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Task: {'id': task-1016796, 'name': ReconfigVM_Task, 'duration_secs': 0.350636} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.280996] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] [instance: 7bef089c-e93b-4ba6-a683-4e076489f92a] Reconfigured VM instance instance-00000025 to attach disk [datastore1] volume-a6bb0405-c6c3-4109-b8e1-29b372ad3058/volume-a6bb0405-c6c3-4109-b8e1-29b372ad3058.vmdk or device None with type thin {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 695.285066] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d9a287e0-fba8-4eac-94ee-e8d4a12e87c0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.300269] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ef38e3d5-dbac-4904-a1a3-8bb19f5d01da tempest-AttachInterfacesV270Test-966357238 tempest-AttachInterfacesV270Test-966357238-project-member] Lock "04fd7aaa-658d-480d-8465-825f120477bc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.039s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 695.308893] env[63345]: DEBUG oslo_vmware.api [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Waiting for the task: (returnval){ [ 695.308893] env[63345]: value = "task-1016798" [ 695.308893] env[63345]: _type = "Task" [ 695.308893] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.318771] env[63345]: DEBUG oslo_vmware.api [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Task: {'id': task-1016798, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.355267] env[63345]: DEBUG oslo_vmware.api [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Task: {'id': task-1016797, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.486889} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.355578] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 0d5cb238-2d25-47b1-8ce6-15a20836dbfb/0d5cb238-2d25-47b1-8ce6-15a20836dbfb.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 695.355829] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 0d5cb238-2d25-47b1-8ce6-15a20836dbfb] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 695.356414] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-876b2665-d10b-484c-a599-84002626fb71 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.360753] env[63345]: DEBUG oslo_concurrency.lockutils [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Releasing lock "refresh_cache-c07c7f5d-a674-458f-8253-1bc2d61be6c1" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 695.361118] env[63345]: DEBUG nova.compute.manager [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] [instance: c07c7f5d-a674-458f-8253-1bc2d61be6c1] Instance network_info: |[{"id": "6950f2a7-3573-46f0-9c46-3301f7ebcf5f", "address": "fa:16:3e:d7:c7:20", "network": {"id": "18285fd9-d154-415c-acbb-1494303e3b6c", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.154", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "5dc99cc64e6c4d83928b309253a8df8d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8b99a46-3e7f-4ef1-9e45-58e6cd17f210", "external-id": "nsx-vlan-transportzone-704", "segmentation_id": 704, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6950f2a7-35", "ovs_interfaceid": "6950f2a7-3573-46f0-9c46-3301f7ebcf5f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 695.362644] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] [instance: c07c7f5d-a674-458f-8253-1bc2d61be6c1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d7:c7:20', 
'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a8b99a46-3e7f-4ef1-9e45-58e6cd17f210', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6950f2a7-3573-46f0-9c46-3301f7ebcf5f', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 695.370550] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Creating folder: Project (4316c7bc78ca419788b6bc605f492129). Parent ref: group-v225918. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 695.370894] env[63345]: DEBUG oslo_vmware.api [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Waiting for the task: (returnval){ [ 695.370894] env[63345]: value = "task-1016799" [ 695.370894] env[63345]: _type = "Task" [ 695.370894] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.371115] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e46f98c5-63d1-4c7f-8cdc-79c36043def0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.382682] env[63345]: DEBUG oslo_vmware.api [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Task: {'id': task-1016799, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.386139] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Created folder: Project (4316c7bc78ca419788b6bc605f492129) in parent group-v225918. [ 695.386376] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Creating folder: Instances. Parent ref: group-v225971. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 695.386644] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d5107903-2c89-450a-a1a5-ccc847beac82 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.395249] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Created folder: Instances in parent group-v225971. [ 695.395479] env[63345]: DEBUG oslo.service.loopingcall [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 695.395690] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c07c7f5d-a674-458f-8253-1bc2d61be6c1] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 695.395912] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-627a8f03-a48f-4829-9ded-86c722bcfd69 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.412406] env[63345]: DEBUG nova.compute.manager [req-c1a19736-cdb3-40f5-9104-068515101f7b req-1c1eea3b-8941-4a85-b3b6-952146cd1954 service nova] [instance: c07c7f5d-a674-458f-8253-1bc2d61be6c1] Received event network-changed-6950f2a7-3573-46f0-9c46-3301f7ebcf5f {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 695.412600] env[63345]: DEBUG nova.compute.manager [req-c1a19736-cdb3-40f5-9104-068515101f7b req-1c1eea3b-8941-4a85-b3b6-952146cd1954 service nova] [instance: c07c7f5d-a674-458f-8253-1bc2d61be6c1] Refreshing instance network info cache due to event network-changed-6950f2a7-3573-46f0-9c46-3301f7ebcf5f. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 695.412812] env[63345]: DEBUG oslo_concurrency.lockutils [req-c1a19736-cdb3-40f5-9104-068515101f7b req-1c1eea3b-8941-4a85-b3b6-952146cd1954 service nova] Acquiring lock "refresh_cache-c07c7f5d-a674-458f-8253-1bc2d61be6c1" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 695.412952] env[63345]: DEBUG oslo_concurrency.lockutils [req-c1a19736-cdb3-40f5-9104-068515101f7b req-1c1eea3b-8941-4a85-b3b6-952146cd1954 service nova] Acquired lock "refresh_cache-c07c7f5d-a674-458f-8253-1bc2d61be6c1" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 695.413124] env[63345]: DEBUG nova.network.neutron [req-c1a19736-cdb3-40f5-9104-068515101f7b req-1c1eea3b-8941-4a85-b3b6-952146cd1954 service nova] [instance: c07c7f5d-a674-458f-8253-1bc2d61be6c1] Refreshing network info cache for port 6950f2a7-3573-46f0-9c46-3301f7ebcf5f {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 695.419734] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 695.419734] env[63345]: value = "task-1016802" [ 695.419734] env[63345]: _type = "Task" [ 695.419734] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.429007] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016802, 'name': CreateVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.818440] env[63345]: DEBUG oslo_vmware.api [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Task: {'id': task-1016798, 'name': ReconfigVM_Task, 'duration_secs': 0.138968} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.818808] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] [instance: 7bef089c-e93b-4ba6-a683-4e076489f92a] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-225930', 'volume_id': 'a6bb0405-c6c3-4109-b8e1-29b372ad3058', 'name': 'volume-a6bb0405-c6c3-4109-b8e1-29b372ad3058', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '7bef089c-e93b-4ba6-a683-4e076489f92a', 'attached_at': '', 'detached_at': '', 'volume_id': 'a6bb0405-c6c3-4109-b8e1-29b372ad3058', 'serial': 'a6bb0405-c6c3-4109-b8e1-29b372ad3058'} {{(pid=63345) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 695.819258] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-98fbf2a2-7596-4eb7-b314-3694ff948f94 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.826411] env[63345]: DEBUG oslo_vmware.api [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Waiting for the task: (returnval){ [ 695.826411] env[63345]: value = "task-1016803" [ 695.826411] env[63345]: _type = "Task" [ 695.826411] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.839288] env[63345]: DEBUG oslo_vmware.api [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Task: {'id': task-1016803, 'name': Rename_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.881811] env[63345]: DEBUG oslo_vmware.api [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Task: {'id': task-1016799, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075977} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.884562] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 0d5cb238-2d25-47b1-8ce6-15a20836dbfb] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 695.885798] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23205e76-d3bd-4793-80c6-7d93c8e83366 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.908410] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 0d5cb238-2d25-47b1-8ce6-15a20836dbfb] Reconfiguring VM instance instance-00000026 to attach disk [datastore2] 0d5cb238-2d25-47b1-8ce6-15a20836dbfb/0d5cb238-2d25-47b1-8ce6-15a20836dbfb.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 695.911113] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-691ba60d-a180-45a4-841e-8d224b86323a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.935822] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016802, 'name': CreateVM_Task} progress is 25%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.937134] env[63345]: DEBUG oslo_vmware.api [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Waiting for the task: (returnval){ [ 695.937134] env[63345]: value = "task-1016804" [ 695.937134] env[63345]: _type = "Task" [ 695.937134] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.948780] env[63345]: DEBUG oslo_vmware.api [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Task: {'id': task-1016804, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.297810] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2318b101-ed11-4791-a654-85718da57fcf {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.305707] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-963450c7-6c95-405f-8399-a5e743cb642a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.341842] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bec70fa-3692-4c3e-b0f5-eb2f7ba1c9df {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.349378] env[63345]: DEBUG oslo_vmware.api [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Task: {'id': task-1016803, 'name': Rename_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.352547] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff062f46-e862-41f0-bd6e-c867cf098bd8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.366114] env[63345]: DEBUG nova.compute.provider_tree [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 696.437931] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016802, 'name': CreateVM_Task, 'duration_secs': 0.925289} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 696.438357] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c07c7f5d-a674-458f-8253-1bc2d61be6c1] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 696.439878] env[63345]: DEBUG oslo_concurrency.lockutils [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 696.440160] env[63345]: DEBUG oslo_concurrency.lockutils [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 696.440549] env[63345]: DEBUG oslo_concurrency.lockutils [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 696.441254] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0bba84a5-3b31-43f2-956a-0fc717cac167 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.449983] env[63345]: DEBUG oslo_vmware.api [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Waiting for the task: (returnval){ [ 696.449983] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52039ef7-c591-a576-1aa4-0251b9556cc5" [ 696.449983] env[63345]: _type = "Task" [ 696.449983] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 696.454320] env[63345]: DEBUG oslo_vmware.api [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Task: {'id': task-1016804, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.467681] env[63345]: DEBUG oslo_vmware.api [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52039ef7-c591-a576-1aa4-0251b9556cc5, 'name': SearchDatastore_Task, 'duration_secs': 0.010611} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 696.467681] env[63345]: DEBUG oslo_concurrency.lockutils [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 696.467681] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] [instance: c07c7f5d-a674-458f-8253-1bc2d61be6c1] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 696.467681] env[63345]: DEBUG oslo_concurrency.lockutils [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 696.468308] env[63345]: DEBUG oslo_concurrency.lockutils [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 696.468308] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 696.468308] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c98a5b71-bf55-47e3-9456-5f0bedc1a679 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.484335] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 696.484632] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 696.485415] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dc3b4489-48fa-4996-aa7b-b2ec87f4886e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.491991] env[63345]: DEBUG oslo_vmware.api [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Waiting for the task: (returnval){ [ 696.491991] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]523c175d-5dd6-1fe1-d55f-de78d4ae57e7" [ 696.491991] env[63345]: _type = "Task" [ 696.491991] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 696.501796] env[63345]: DEBUG oslo_vmware.api [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]523c175d-5dd6-1fe1-d55f-de78d4ae57e7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.676278] env[63345]: DEBUG nova.network.neutron [req-c1a19736-cdb3-40f5-9104-068515101f7b req-1c1eea3b-8941-4a85-b3b6-952146cd1954 service nova] [instance: c07c7f5d-a674-458f-8253-1bc2d61be6c1] Updated VIF entry in instance network info cache for port 6950f2a7-3573-46f0-9c46-3301f7ebcf5f. {{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 696.676759] env[63345]: DEBUG nova.network.neutron [req-c1a19736-cdb3-40f5-9104-068515101f7b req-1c1eea3b-8941-4a85-b3b6-952146cd1954 service nova] [instance: c07c7f5d-a674-458f-8253-1bc2d61be6c1] Updating instance_info_cache with network_info: [{"id": "6950f2a7-3573-46f0-9c46-3301f7ebcf5f", "address": "fa:16:3e:d7:c7:20", "network": {"id": "18285fd9-d154-415c-acbb-1494303e3b6c", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.154", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "5dc99cc64e6c4d83928b309253a8df8d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8b99a46-3e7f-4ef1-9e45-58e6cd17f210", "external-id": "nsx-vlan-transportzone-704", "segmentation_id": 704, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6950f2a7-35", "ovs_interfaceid": "6950f2a7-3573-46f0-9c46-3301f7ebcf5f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 696.696934] env[63345]: DEBUG nova.network.neutron [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] [instance: f043239f-7158-4199-a784-d711a5a301be] Successfully updated port: a8153077-1984-4619-ae74-08c5902cfff8 
{{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 696.851305] env[63345]: DEBUG oslo_vmware.api [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Task: {'id': task-1016803, 'name': Rename_Task, 'duration_secs': 0.810325} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 696.851559] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] [instance: 7bef089c-e93b-4ba6-a683-4e076489f92a] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 696.851824] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2c321d7d-2df8-4d86-9c74-49175c46b47d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.858814] env[63345]: DEBUG oslo_vmware.api [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Waiting for the task: (returnval){ [ 696.858814] env[63345]: value = "task-1016805" [ 696.858814] env[63345]: _type = "Task" [ 696.858814] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 696.866772] env[63345]: DEBUG oslo_vmware.api [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Task: {'id': task-1016805, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.868833] env[63345]: DEBUG nova.scheduler.client.report [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 696.953779] env[63345]: DEBUG oslo_vmware.api [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Task: {'id': task-1016804, 'name': ReconfigVM_Task, 'duration_secs': 0.734104} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 696.954091] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 0d5cb238-2d25-47b1-8ce6-15a20836dbfb] Reconfigured VM instance instance-00000026 to attach disk [datastore2] 0d5cb238-2d25-47b1-8ce6-15a20836dbfb/0d5cb238-2d25-47b1-8ce6-15a20836dbfb.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 696.954754] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a9966d67-4678-44a5-a1e0-d8c6ea5d222b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.961205] env[63345]: DEBUG oslo_vmware.api [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Waiting for the task: (returnval){ [ 696.961205] env[63345]: value = "task-1016806" [ 696.961205] env[63345]: _type = "Task" [ 696.961205] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 696.970303] env[63345]: DEBUG oslo_vmware.api [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Task: {'id': task-1016806, 'name': Rename_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.001802] env[63345]: DEBUG oslo_vmware.api [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]523c175d-5dd6-1fe1-d55f-de78d4ae57e7, 'name': SearchDatastore_Task, 'duration_secs': 0.011835} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 697.002725] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e48a615-b17f-4c14-a0b4-f40ca6e14334 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.009329] env[63345]: DEBUG oslo_vmware.api [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Waiting for the task: (returnval){ [ 697.009329] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52a2cc4a-28b7-5b6d-60c2-176e0f9fc0fc" [ 697.009329] env[63345]: _type = "Task" [ 697.009329] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 697.018300] env[63345]: DEBUG oslo_vmware.api [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52a2cc4a-28b7-5b6d-60c2-176e0f9fc0fc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.179458] env[63345]: DEBUG oslo_concurrency.lockutils [req-c1a19736-cdb3-40f5-9104-068515101f7b req-1c1eea3b-8941-4a85-b3b6-952146cd1954 service nova] Releasing lock "refresh_cache-c07c7f5d-a674-458f-8253-1bc2d61be6c1" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 697.199872] env[63345]: DEBUG oslo_concurrency.lockutils [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Acquiring lock "refresh_cache-f043239f-7158-4199-a784-d711a5a301be" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 697.200131] env[63345]: DEBUG oslo_concurrency.lockutils [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Acquired lock "refresh_cache-f043239f-7158-4199-a784-d711a5a301be" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 697.200330] env[63345]: DEBUG nova.network.neutron [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] [instance: f043239f-7158-4199-a784-d711a5a301be] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 697.369650] env[63345]: DEBUG oslo_vmware.api [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Task: {'id': task-1016805, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.374625] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.617s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 697.375144] env[63345]: DEBUG nova.compute.manager [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] [instance: 30755716-03a7-41bd-90c2-7ef21baf9975] Start building networks asynchronously for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 697.377632] env[63345]: DEBUG oslo_concurrency.lockutils [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 28.895s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 697.434704] env[63345]: DEBUG nova.compute.manager [req-3370dd98-4c73-4a86-87ab-687e31294b59 req-dd0fc6a5-edf8-4b0d-ab07-35936b55ed07 service nova] [instance: f043239f-7158-4199-a784-d711a5a301be] Received event network-vif-plugged-a8153077-1984-4619-ae74-08c5902cfff8 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 697.434966] env[63345]: DEBUG oslo_concurrency.lockutils [req-3370dd98-4c73-4a86-87ab-687e31294b59 req-dd0fc6a5-edf8-4b0d-ab07-35936b55ed07 service nova] Acquiring lock "f043239f-7158-4199-a784-d711a5a301be-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 697.435234] env[63345]: DEBUG oslo_concurrency.lockutils [req-3370dd98-4c73-4a86-87ab-687e31294b59 req-dd0fc6a5-edf8-4b0d-ab07-35936b55ed07 service nova] Lock "f043239f-7158-4199-a784-d711a5a301be-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 697.435411] env[63345]: DEBUG oslo_concurrency.lockutils [req-3370dd98-4c73-4a86-87ab-687e31294b59 req-dd0fc6a5-edf8-4b0d-ab07-35936b55ed07 service nova] Lock "f043239f-7158-4199-a784-d711a5a301be-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 697.435646] env[63345]: DEBUG nova.compute.manager [req-3370dd98-4c73-4a86-87ab-687e31294b59 req-dd0fc6a5-edf8-4b0d-ab07-35936b55ed07 service nova] [instance: f043239f-7158-4199-a784-d711a5a301be] No waiting events found dispatching network-vif-plugged-a8153077-1984-4619-ae74-08c5902cfff8 {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 697.435747] env[63345]: WARNING nova.compute.manager [req-3370dd98-4c73-4a86-87ab-687e31294b59 req-dd0fc6a5-edf8-4b0d-ab07-35936b55ed07 service nova] [instance: f043239f-7158-4199-a784-d711a5a301be] Received unexpected event network-vif-plugged-a8153077-1984-4619-ae74-08c5902cfff8 for instance with vm_state building and task_state spawning. [ 697.435946] env[63345]: DEBUG nova.compute.manager [req-3370dd98-4c73-4a86-87ab-687e31294b59 req-dd0fc6a5-edf8-4b0d-ab07-35936b55ed07 service nova] [instance: f043239f-7158-4199-a784-d711a5a301be] Received event network-changed-a8153077-1984-4619-ae74-08c5902cfff8 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 697.436086] env[63345]: DEBUG nova.compute.manager [req-3370dd98-4c73-4a86-87ab-687e31294b59 req-dd0fc6a5-edf8-4b0d-ab07-35936b55ed07 service nova] [instance: f043239f-7158-4199-a784-d711a5a301be] Refreshing instance network info cache due to event network-changed-a8153077-1984-4619-ae74-08c5902cfff8. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 697.436298] env[63345]: DEBUG oslo_concurrency.lockutils [req-3370dd98-4c73-4a86-87ab-687e31294b59 req-dd0fc6a5-edf8-4b0d-ab07-35936b55ed07 service nova] Acquiring lock "refresh_cache-f043239f-7158-4199-a784-d711a5a301be" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 697.472043] env[63345]: DEBUG oslo_vmware.api [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Task: {'id': task-1016806, 'name': Rename_Task, 'duration_secs': 0.172951} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 697.472446] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 0d5cb238-2d25-47b1-8ce6-15a20836dbfb] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 697.472735] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-908bc64c-81fe-473f-81e2-35f544f6965a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.483285] env[63345]: DEBUG oslo_vmware.api [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Waiting for the task: (returnval){ [ 697.483285] env[63345]: value = "task-1016807" [ 697.483285] env[63345]: _type = "Task" [ 697.483285] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 697.491768] env[63345]: DEBUG oslo_vmware.api [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Task: {'id': task-1016807, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.519982] env[63345]: DEBUG oslo_vmware.api [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52a2cc4a-28b7-5b6d-60c2-176e0f9fc0fc, 'name': SearchDatastore_Task, 'duration_secs': 0.011124} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 697.520303] env[63345]: DEBUG oslo_concurrency.lockutils [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 697.520571] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] c07c7f5d-a674-458f-8253-1bc2d61be6c1/c07c7f5d-a674-458f-8253-1bc2d61be6c1.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 697.520837] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-279fd486-1228-499f-9229-92d267e8adb4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.528528] env[63345]: DEBUG oslo_vmware.api [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Waiting for the task: (returnval){ [ 697.528528] env[63345]: value = "task-1016808" [ 697.528528] env[63345]: _type = "Task" [ 697.528528] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 697.537087] env[63345]: DEBUG oslo_vmware.api [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Task: {'id': task-1016808, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.735035] env[63345]: DEBUG nova.network.neutron [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] [instance: f043239f-7158-4199-a784-d711a5a301be] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 697.871825] env[63345]: DEBUG oslo_vmware.api [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Task: {'id': task-1016805, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.881575] env[63345]: DEBUG nova.compute.utils [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 697.887167] env[63345]: INFO nova.compute.claims [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 697.890408] env[63345]: DEBUG nova.compute.manager [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] [instance: 30755716-03a7-41bd-90c2-7ef21baf9975] Allocating IP information in the background. {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 697.890767] env[63345]: DEBUG nova.network.neutron [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] [instance: 30755716-03a7-41bd-90c2-7ef21baf9975] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 697.994510] env[63345]: DEBUG oslo_vmware.api [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Task: {'id': task-1016807, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.010913] env[63345]: DEBUG nova.policy [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '23618281441b44c5a4e29ee511b2df9a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f1dab31b160f43b7897e0eac3d2024a1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 698.045821] env[63345]: DEBUG oslo_vmware.api [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Task: {'id': task-1016808, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.085221] env[63345]: DEBUG nova.network.neutron [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] [instance: f043239f-7158-4199-a784-d711a5a301be] Updating instance_info_cache with network_info: [{"id": "a8153077-1984-4619-ae74-08c5902cfff8", "address": "fa:16:3e:85:4a:56", "network": {"id": "2353863a-5385-41e5-b8e5-a376182d8801", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-647406981-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "705d13cb3bd04eebbfccb1353523d6a4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5c7821ea-f92f-4f06-a4cb-05e1186a9d22", "external-id": "nsx-vlan-transportzone-69", "segmentation_id": 69, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa8153077-19", "ovs_interfaceid": "a8153077-1984-4619-ae74-08c5902cfff8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 698.372115] env[63345]: DEBUG oslo_vmware.api [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Task: {'id': task-1016805, 'name': PowerOnVM_Task, 'duration_secs': 1.294926} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 698.372115] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] [instance: 7bef089c-e93b-4ba6-a683-4e076489f92a] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 698.372400] env[63345]: INFO nova.compute.manager [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] [instance: 7bef089c-e93b-4ba6-a683-4e076489f92a] Took 10.73 seconds to spawn the instance on the hypervisor. 
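Aside: the repeated "Task: {'id': task-1016805, 'name': PowerOnVM_Task} progress is N%." entries above are produced by oslo.vmware's task polling loop (wait_for_task / _poll_task in oslo_vmware/api.py): the driver submits a task-returning vCenter SOAP call (here VirtualMachine.PowerOnVM_Task), then polls the task's progress until it reports success or error, at which point the "completed successfully" line is emitted. The following is a minimal sketch of that call pattern against the public oslo.vmware API, not code taken from this log; the vCenter host, credentials, retry/poll values and the VM managed-object id are placeholders.

    # Sketch of the oslo.vmware pattern visible in the log: open a vCenter
    # session, invoke a task-returning SOAP method, then let wait_for_task()
    # poll the task (the "progress is N%" lines) until it completes.
    # Host, credentials and the VM reference below are placeholders.
    from oslo_vmware import api, vim_util

    session = api.VMwareAPISession(
        'vcenter.example.org',            # placeholder vCenter host
        'administrator@vsphere.local',    # placeholder username
        'secret',                         # placeholder password
        api_retry_count=10,
        task_poll_interval=0.5,           # seconds between progress polls
    )

    # A real caller would look the VM up first; a hard-coded moid is used
    # here purely for illustration.
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

    # Submit the task (analogous to "Invoking VirtualMachine.PowerOnVM_Task").
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

    # Block until the task succeeds or raises, logging progress along the
    # way; this loop is what produces the "_poll_task ... progress is N%"
    # debug lines in the log above.
    session.wait_for_task(task)

The same pattern underlies the other task types seen in this section (CreateVM_Task, Rename_Task, ReconfigVM_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task): only the invoked method and its arguments change, while submission and polling go through the same session helpers.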
[ 698.373031] env[63345]: DEBUG nova.compute.manager [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] [instance: 7bef089c-e93b-4ba6-a683-4e076489f92a] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 698.374727] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93b7b8c3-c0b2-49db-b2dc-eb0a7de0ead5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.393933] env[63345]: DEBUG nova.compute.manager [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] [instance: 30755716-03a7-41bd-90c2-7ef21baf9975] Start building block device mappings for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 698.402655] env[63345]: INFO nova.compute.resource_tracker [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Updating resource usage from migration ed7f0ba8-ef84-42aa-81f6-263f46fbef39 [ 698.501638] env[63345]: DEBUG oslo_vmware.api [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Task: {'id': task-1016807, 'name': PowerOnVM_Task, 'duration_secs': 0.704917} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 698.504018] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 0d5cb238-2d25-47b1-8ce6-15a20836dbfb] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 698.504343] env[63345]: INFO nova.compute.manager [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 0d5cb238-2d25-47b1-8ce6-15a20836dbfb] Took 9.94 seconds to spawn the instance on the hypervisor. [ 698.504541] env[63345]: DEBUG nova.compute.manager [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 0d5cb238-2d25-47b1-8ce6-15a20836dbfb] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 698.505525] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78c2bc8b-022e-406b-bf6a-eb53ddd81982 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.541077] env[63345]: DEBUG oslo_vmware.api [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Task: {'id': task-1016808, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.67788} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 698.543977] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] c07c7f5d-a674-458f-8253-1bc2d61be6c1/c07c7f5d-a674-458f-8253-1bc2d61be6c1.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 698.544253] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] [instance: c07c7f5d-a674-458f-8253-1bc2d61be6c1] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 698.544720] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c38fcb9b-9a55-447f-831d-bc8c6d860958 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.553073] env[63345]: DEBUG oslo_vmware.api [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Waiting for the task: (returnval){ [ 698.553073] env[63345]: value = "task-1016809" [ 698.553073] env[63345]: _type = "Task" [ 698.553073] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 698.565191] env[63345]: DEBUG oslo_vmware.api [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Task: {'id': task-1016809, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.592278] env[63345]: DEBUG oslo_concurrency.lockutils [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Releasing lock "refresh_cache-f043239f-7158-4199-a784-d711a5a301be" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 698.592278] env[63345]: DEBUG nova.compute.manager [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] [instance: f043239f-7158-4199-a784-d711a5a301be] Instance network_info: |[{"id": "a8153077-1984-4619-ae74-08c5902cfff8", "address": "fa:16:3e:85:4a:56", "network": {"id": "2353863a-5385-41e5-b8e5-a376182d8801", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-647406981-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "705d13cb3bd04eebbfccb1353523d6a4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5c7821ea-f92f-4f06-a4cb-05e1186a9d22", "external-id": "nsx-vlan-transportzone-69", "segmentation_id": 69, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa8153077-19", "ovs_interfaceid": "a8153077-1984-4619-ae74-08c5902cfff8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 698.592468] env[63345]: DEBUG oslo_concurrency.lockutils [req-3370dd98-4c73-4a86-87ab-687e31294b59 req-dd0fc6a5-edf8-4b0d-ab07-35936b55ed07 service nova] Acquired lock "refresh_cache-f043239f-7158-4199-a784-d711a5a301be" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 698.592468] env[63345]: DEBUG nova.network.neutron [req-3370dd98-4c73-4a86-87ab-687e31294b59 req-dd0fc6a5-edf8-4b0d-ab07-35936b55ed07 service nova] [instance: f043239f-7158-4199-a784-d711a5a301be] Refreshing network info cache for port a8153077-1984-4619-ae74-08c5902cfff8 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 698.593495] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] [instance: f043239f-7158-4199-a784-d711a5a301be] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:85:4a:56', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5c7821ea-f92f-4f06-a4cb-05e1186a9d22', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a8153077-1984-4619-ae74-08c5902cfff8', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 698.602671] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 
tempest-ServersV294TestFqdnHostnames-438011618-project-member] Creating folder: Project (705d13cb3bd04eebbfccb1353523d6a4). Parent ref: group-v225918. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 698.606499] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c8d2954a-8bbc-427b-a5ec-46c626c60de5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.619166] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Created folder: Project (705d13cb3bd04eebbfccb1353523d6a4) in parent group-v225918. [ 698.619166] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Creating folder: Instances. Parent ref: group-v225974. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 698.619463] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b1ce4c24-4c0d-4f38-afc9-7a09ea847053 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.629794] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Created folder: Instances in parent group-v225974. [ 698.630107] env[63345]: DEBUG oslo.service.loopingcall [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 698.630871] env[63345]: DEBUG nova.network.neutron [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] [instance: 30755716-03a7-41bd-90c2-7ef21baf9975] Successfully created port: 32300854-3281-41f9-8ba4-87a1c457e72c {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 698.634017] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f043239f-7158-4199-a784-d711a5a301be] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 698.634017] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3cd2c044-42bf-4bf3-a01f-82bb10645b84 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.660343] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 698.660343] env[63345]: value = "task-1016812" [ 698.660343] env[63345]: _type = "Task" [ 698.660343] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 698.669283] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016812, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.901543] env[63345]: INFO nova.compute.manager [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] [instance: 7bef089c-e93b-4ba6-a683-4e076489f92a] Took 49.64 seconds to build instance. [ 698.962894] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06042a05-9b56-4cf2-af38-ac9b1ffb14c6 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.971432] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e161ffb6-45f8-4b08-bebc-5e5f0183f324 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.011969] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e59a43d2-bad2-4bb7-b275-6c12dba9a89c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.025650] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cff8a28-2f30-4f81-a688-dc461212860c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.033591] env[63345]: INFO nova.compute.manager [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 0d5cb238-2d25-47b1-8ce6-15a20836dbfb] Took 41.45 seconds to build instance. [ 699.048264] env[63345]: DEBUG nova.compute.provider_tree [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 699.064765] env[63345]: DEBUG oslo_vmware.api [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Task: {'id': task-1016809, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.159263} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 699.065369] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] [instance: c07c7f5d-a674-458f-8253-1bc2d61be6c1] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 699.066760] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16f0c9f4-38bd-44be-88d6-9606f55491ef {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.090780] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] [instance: c07c7f5d-a674-458f-8253-1bc2d61be6c1] Reconfiguring VM instance instance-00000027 to attach disk [datastore2] c07c7f5d-a674-458f-8253-1bc2d61be6c1/c07c7f5d-a674-458f-8253-1bc2d61be6c1.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 699.091548] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f2cce5e8-8404-4bc5-9e2f-9f719c43ae49 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.115883] env[63345]: DEBUG oslo_vmware.api [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Waiting for the task: (returnval){ [ 699.115883] env[63345]: value = "task-1016813" [ 699.115883] env[63345]: _type = "Task" [ 699.115883] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 699.125792] env[63345]: DEBUG oslo_vmware.api [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Task: {'id': task-1016813, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.170688] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016812, 'name': CreateVM_Task, 'duration_secs': 0.411549} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 699.170823] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f043239f-7158-4199-a784-d711a5a301be] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 699.171661] env[63345]: DEBUG oslo_concurrency.lockutils [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 699.171911] env[63345]: DEBUG oslo_concurrency.lockutils [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 699.172248] env[63345]: DEBUG oslo_concurrency.lockutils [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 699.172551] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c0b05f37-66d8-425b-9827-d7eadd650607 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.178425] env[63345]: DEBUG oslo_vmware.api [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Waiting for the task: (returnval){ [ 699.178425] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52382b33-735e-cbfa-f0df-b5567ad3bf0e" [ 699.178425] env[63345]: _type = "Task" [ 699.178425] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 699.186728] env[63345]: DEBUG oslo_vmware.api [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52382b33-735e-cbfa-f0df-b5567ad3bf0e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.404061] env[63345]: DEBUG oslo_concurrency.lockutils [None req-519607c6-2f18-4a3f-8b87-6cb7a9f9fae7 tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Lock "7bef089c-e93b-4ba6-a683-4e076489f92a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 128.407s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 699.413976] env[63345]: DEBUG nova.compute.manager [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] [instance: 30755716-03a7-41bd-90c2-7ef21baf9975] Start spawning the instance on the hypervisor. {{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 699.450943] env[63345]: DEBUG nova.virt.hardware [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 699.451419] env[63345]: DEBUG nova.virt.hardware [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 699.451704] env[63345]: DEBUG nova.virt.hardware [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 699.452563] env[63345]: DEBUG nova.virt.hardware [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 699.452943] env[63345]: DEBUG nova.virt.hardware [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 699.453023] env[63345]: DEBUG nova.virt.hardware [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, 
cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 699.453330] env[63345]: DEBUG nova.virt.hardware [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 699.453552] env[63345]: DEBUG nova.virt.hardware [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 699.453818] env[63345]: DEBUG nova.virt.hardware [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 699.454418] env[63345]: DEBUG nova.virt.hardware [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 699.454418] env[63345]: DEBUG nova.virt.hardware [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 699.456234] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa756803-a902-44a5-bc13-035d936d3f9b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.471106] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6565b9dc-a525-4b7f-a23d-6abcc8393e8c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.539253] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7d38f5e9-afc1-443b-b99c-8a06994d5703 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Lock "0d5cb238-2d25-47b1-8ce6-15a20836dbfb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 127.978s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 699.541690] env[63345]: DEBUG nova.network.neutron [req-3370dd98-4c73-4a86-87ab-687e31294b59 req-dd0fc6a5-edf8-4b0d-ab07-35936b55ed07 service nova] [instance: f043239f-7158-4199-a784-d711a5a301be] Updated VIF entry in instance network info cache for port a8153077-1984-4619-ae74-08c5902cfff8. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 699.542030] env[63345]: DEBUG nova.network.neutron [req-3370dd98-4c73-4a86-87ab-687e31294b59 req-dd0fc6a5-edf8-4b0d-ab07-35936b55ed07 service nova] [instance: f043239f-7158-4199-a784-d711a5a301be] Updating instance_info_cache with network_info: [{"id": "a8153077-1984-4619-ae74-08c5902cfff8", "address": "fa:16:3e:85:4a:56", "network": {"id": "2353863a-5385-41e5-b8e5-a376182d8801", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-647406981-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "705d13cb3bd04eebbfccb1353523d6a4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5c7821ea-f92f-4f06-a4cb-05e1186a9d22", "external-id": "nsx-vlan-transportzone-69", "segmentation_id": 69, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa8153077-19", "ovs_interfaceid": "a8153077-1984-4619-ae74-08c5902cfff8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 699.582692] env[63345]: DEBUG nova.scheduler.client.report [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Updated inventory for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with generation 72 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 699.582692] env[63345]: DEBUG nova.compute.provider_tree [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Updating resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 generation from 72 to 73 during operation: update_inventory {{(pid=63345) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 699.582859] env[63345]: DEBUG nova.compute.provider_tree [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 
699.627946] env[63345]: DEBUG oslo_vmware.api [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Task: {'id': task-1016813, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.688854] env[63345]: DEBUG oslo_vmware.api [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52382b33-735e-cbfa-f0df-b5567ad3bf0e, 'name': SearchDatastore_Task, 'duration_secs': 0.009842} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 699.689337] env[63345]: DEBUG oslo_concurrency.lockutils [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 699.689695] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] [instance: f043239f-7158-4199-a784-d711a5a301be] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 699.690506] env[63345]: DEBUG oslo_concurrency.lockutils [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 699.690746] env[63345]: DEBUG oslo_concurrency.lockutils [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 699.691035] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 699.691342] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ede0e80f-54cc-4386-b78a-cabc388ca908 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.701236] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 699.701236] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 699.703211] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b03f854b-2d1f-43e4-bb85-f9825359a12d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.708063] env[63345]: DEBUG oslo_vmware.api [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Waiting for the task: (returnval){ [ 699.708063] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]523a5a28-19bf-6293-d28a-53fe68fe6038" [ 699.708063] env[63345]: _type = "Task" [ 699.708063] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 699.716264] env[63345]: DEBUG oslo_vmware.api [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]523a5a28-19bf-6293-d28a-53fe68fe6038, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.908984] env[63345]: DEBUG nova.compute.manager [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 3101726f-5b14-417e-bcf8-390ce1f9b467] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 700.044429] env[63345]: DEBUG nova.compute.manager [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] [instance: 64fcf837-1d9d-41b1-a2a1-3c16362932cf] Starting instance... 
{{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 700.047586] env[63345]: DEBUG oslo_concurrency.lockutils [req-3370dd98-4c73-4a86-87ab-687e31294b59 req-dd0fc6a5-edf8-4b0d-ab07-35936b55ed07 service nova] Releasing lock "refresh_cache-f043239f-7158-4199-a784-d711a5a301be" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 700.087361] env[63345]: DEBUG oslo_concurrency.lockutils [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.710s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 700.087579] env[63345]: INFO nova.compute.manager [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Migrating [ 700.087815] env[63345]: DEBUG oslo_concurrency.lockutils [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Acquiring lock "compute-rpcapi-router" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 700.087961] env[63345]: DEBUG oslo_concurrency.lockutils [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Acquired lock "compute-rpcapi-router" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 700.091908] env[63345]: DEBUG oslo_concurrency.lockutils [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.148s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 700.093534] env[63345]: INFO nova.compute.claims [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 869f8110-6490-4a47-955a-0ce085f826af] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 700.101060] env[63345]: INFO nova.compute.rpcapi [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Automatically selected compute RPC version 6.4 from minimum service version 68 [ 700.101060] env[63345]: DEBUG oslo_concurrency.lockutils [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Releasing lock "compute-rpcapi-router" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 700.130024] env[63345]: DEBUG oslo_vmware.api [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Task: {'id': task-1016813, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.224197] env[63345]: DEBUG oslo_vmware.api [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]523a5a28-19bf-6293-d28a-53fe68fe6038, 'name': SearchDatastore_Task, 'duration_secs': 0.011754} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 700.225164] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-35e03114-597d-4d31-8ba5-60687fe56920 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.231403] env[63345]: DEBUG oslo_vmware.api [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Waiting for the task: (returnval){ [ 700.231403] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]524d3000-bc82-9b78-ec0f-b0c6d22304e6" [ 700.231403] env[63345]: _type = "Task" [ 700.231403] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 700.239638] env[63345]: DEBUG oslo_vmware.api [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]524d3000-bc82-9b78-ec0f-b0c6d22304e6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.442797] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 700.567064] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 700.617786] env[63345]: DEBUG oslo_concurrency.lockutils [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Acquiring lock "refresh_cache-27c6dc17-4ded-4fe7-8fba-265eae64fc32" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 700.620505] env[63345]: DEBUG oslo_concurrency.lockutils [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Acquired lock "refresh_cache-27c6dc17-4ded-4fe7-8fba-265eae64fc32" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 700.620505] env[63345]: DEBUG nova.network.neutron [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e 
tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 700.635807] env[63345]: DEBUG oslo_vmware.api [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Task: {'id': task-1016813, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.747010] env[63345]: DEBUG oslo_vmware.api [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]524d3000-bc82-9b78-ec0f-b0c6d22304e6, 'name': SearchDatastore_Task, 'duration_secs': 0.009675} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 700.747388] env[63345]: DEBUG oslo_concurrency.lockutils [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 700.747643] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] f043239f-7158-4199-a784-d711a5a301be/f043239f-7158-4199-a784-d711a5a301be.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 700.747913] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-05600218-e5fa-4a36-943d-fdc198831ecd {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.755676] env[63345]: DEBUG oslo_vmware.api [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Waiting for the task: (returnval){ [ 700.755676] env[63345]: value = "task-1016814" [ 700.755676] env[63345]: _type = "Task" [ 700.755676] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 700.765454] env[63345]: DEBUG oslo_vmware.api [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Task: {'id': task-1016814, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.094434] env[63345]: DEBUG nova.network.neutron [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] [instance: 30755716-03a7-41bd-90c2-7ef21baf9975] Successfully updated port: 32300854-3281-41f9-8ba4-87a1c457e72c {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 701.134780] env[63345]: DEBUG oslo_vmware.api [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Task: {'id': task-1016813, 'name': ReconfigVM_Task, 'duration_secs': 1.845811} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 701.134916] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] [instance: c07c7f5d-a674-458f-8253-1bc2d61be6c1] Reconfigured VM instance instance-00000027 to attach disk [datastore2] c07c7f5d-a674-458f-8253-1bc2d61be6c1/c07c7f5d-a674-458f-8253-1bc2d61be6c1.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 701.135817] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b2755930-858f-4eba-b4af-78299599e1d6 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.142169] env[63345]: DEBUG oslo_vmware.api [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Waiting for the task: (returnval){ [ 701.142169] env[63345]: value = "task-1016815" [ 701.142169] env[63345]: _type = "Task" [ 701.142169] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 701.151697] env[63345]: DEBUG oslo_vmware.api [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Task: {'id': task-1016815, 'name': Rename_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.271978] env[63345]: DEBUG oslo_vmware.api [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Task: {'id': task-1016814, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.514388] env[63345]: DEBUG nova.network.neutron [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Updating instance_info_cache with network_info: [{"id": "8c1bd582-6867-4cba-9522-0e03560fa3f7", "address": "fa:16:3e:3d:4f:aa", "network": {"id": "18285fd9-d154-415c-acbb-1494303e3b6c", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "5dc99cc64e6c4d83928b309253a8df8d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8b99a46-3e7f-4ef1-9e45-58e6cd17f210", "external-id": "nsx-vlan-transportzone-704", "segmentation_id": 704, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c1bd582-68", "ovs_interfaceid": "8c1bd582-6867-4cba-9522-0e03560fa3f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 701.595691] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Acquiring lock "refresh_cache-30755716-03a7-41bd-90c2-7ef21baf9975" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 701.595848] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Acquired lock "refresh_cache-30755716-03a7-41bd-90c2-7ef21baf9975" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 701.595999] env[63345]: DEBUG nova.network.neutron [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] [instance: 30755716-03a7-41bd-90c2-7ef21baf9975] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 701.657755] env[63345]: DEBUG oslo_vmware.api [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Task: {'id': task-1016815, 'name': Rename_Task} progress is 14%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.659842] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86832c1d-e958-4de1-b86d-e7a82f42f258 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.666465] env[63345]: DEBUG nova.compute.manager [req-cfb35e53-0166-45e4-ab49-574e2e08d94a req-7695fb2d-ee8f-4e65-9259-fa2113b6d68f service nova] [instance: 30755716-03a7-41bd-90c2-7ef21baf9975] Received event network-vif-plugged-32300854-3281-41f9-8ba4-87a1c457e72c {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 701.666787] env[63345]: DEBUG oslo_concurrency.lockutils [req-cfb35e53-0166-45e4-ab49-574e2e08d94a req-7695fb2d-ee8f-4e65-9259-fa2113b6d68f service nova] Acquiring lock "30755716-03a7-41bd-90c2-7ef21baf9975-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 701.667203] env[63345]: DEBUG oslo_concurrency.lockutils [req-cfb35e53-0166-45e4-ab49-574e2e08d94a req-7695fb2d-ee8f-4e65-9259-fa2113b6d68f service nova] Lock "30755716-03a7-41bd-90c2-7ef21baf9975-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 701.667467] env[63345]: DEBUG oslo_concurrency.lockutils [req-cfb35e53-0166-45e4-ab49-574e2e08d94a req-7695fb2d-ee8f-4e65-9259-fa2113b6d68f service nova] Lock "30755716-03a7-41bd-90c2-7ef21baf9975-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 701.667797] env[63345]: DEBUG nova.compute.manager [req-cfb35e53-0166-45e4-ab49-574e2e08d94a req-7695fb2d-ee8f-4e65-9259-fa2113b6d68f service nova] [instance: 30755716-03a7-41bd-90c2-7ef21baf9975] No waiting events found dispatching network-vif-plugged-32300854-3281-41f9-8ba4-87a1c457e72c {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 701.668105] env[63345]: WARNING nova.compute.manager [req-cfb35e53-0166-45e4-ab49-574e2e08d94a req-7695fb2d-ee8f-4e65-9259-fa2113b6d68f service nova] [instance: 30755716-03a7-41bd-90c2-7ef21baf9975] Received unexpected event network-vif-plugged-32300854-3281-41f9-8ba4-87a1c457e72c for instance with vm_state building and task_state spawning. 
[ 701.674263] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e7cf343-2031-49e1-bb9e-84de11213154 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.712022] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa8eced6-00e6-4778-80ed-6d4cc571f72b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.722049] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82812c20-0c97-468b-b69d-29ed7fad8dc8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.738061] env[63345]: DEBUG nova.compute.provider_tree [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 701.766899] env[63345]: DEBUG oslo_vmware.api [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Task: {'id': task-1016814, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.66975} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 701.767169] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] f043239f-7158-4199-a784-d711a5a301be/f043239f-7158-4199-a784-d711a5a301be.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 701.767387] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] [instance: f043239f-7158-4199-a784-d711a5a301be] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 701.767650] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b5b92aeb-1d2d-4d62-b2b4-3e206eee68eb {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.773674] env[63345]: DEBUG oslo_vmware.api [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Waiting for the task: (returnval){ [ 701.773674] env[63345]: value = "task-1016816" [ 701.773674] env[63345]: _type = "Task" [ 701.773674] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 701.783034] env[63345]: DEBUG oslo_vmware.api [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Task: {'id': task-1016816, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.931078] env[63345]: DEBUG oslo_concurrency.lockutils [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Acquiring lock "a9b69d13-6330-4f9b-b8e1-1c0017655f9f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 701.931431] env[63345]: DEBUG oslo_concurrency.lockutils [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Lock "a9b69d13-6330-4f9b-b8e1-1c0017655f9f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 702.020722] env[63345]: DEBUG oslo_concurrency.lockutils [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Releasing lock "refresh_cache-27c6dc17-4ded-4fe7-8fba-265eae64fc32" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 702.155631] env[63345]: DEBUG oslo_vmware.api [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Task: {'id': task-1016815, 'name': Rename_Task, 'duration_secs': 0.71262} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 702.156366] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] [instance: c07c7f5d-a674-458f-8253-1bc2d61be6c1] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 702.159018] env[63345]: DEBUG nova.network.neutron [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] [instance: 30755716-03a7-41bd-90c2-7ef21baf9975] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 702.159559] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5ec1e1bb-a0c7-4b2f-9670-7459c64d2b07 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.169023] env[63345]: DEBUG oslo_vmware.api [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Waiting for the task: (returnval){ [ 702.169023] env[63345]: value = "task-1016817" [ 702.169023] env[63345]: _type = "Task" [ 702.169023] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.175876] env[63345]: DEBUG oslo_vmware.api [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Task: {'id': task-1016817, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.242080] env[63345]: DEBUG nova.scheduler.client.report [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 702.286849] env[63345]: DEBUG oslo_vmware.api [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Task: {'id': task-1016816, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06001} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 702.288077] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] [instance: f043239f-7158-4199-a784-d711a5a301be] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 702.288960] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c73f6f5-1160-4567-86d2-b829ad1bee73 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.317674] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] [instance: f043239f-7158-4199-a784-d711a5a301be] Reconfiguring VM instance instance-00000028 to attach disk [datastore2] f043239f-7158-4199-a784-d711a5a301be/f043239f-7158-4199-a784-d711a5a301be.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 702.318024] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-49500268-e559-46d7-a99f-0b6ff1c226b3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.340830] env[63345]: DEBUG oslo_vmware.api [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Waiting for the task: (returnval){ [ 702.340830] env[63345]: value = "task-1016818" [ 702.340830] env[63345]: _type = "Task" [ 702.340830] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.351727] env[63345]: DEBUG oslo_vmware.api [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Task: {'id': task-1016818, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.441723] env[63345]: DEBUG nova.network.neutron [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] [instance: 30755716-03a7-41bd-90c2-7ef21baf9975] Updating instance_info_cache with network_info: [{"id": "32300854-3281-41f9-8ba4-87a1c457e72c", "address": "fa:16:3e:b2:ca:8b", "network": {"id": "e181c356-7853-4158-96d0-47f099cb900b", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-735360173-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f1dab31b160f43b7897e0eac3d2024a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "88651df2-0506-4f6c-b868-dd30a81f2b1c", "external-id": "nsx-vlan-transportzone-366", "segmentation_id": 366, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap32300854-32", "ovs_interfaceid": "32300854-3281-41f9-8ba4-87a1c457e72c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 702.678112] env[63345]: DEBUG oslo_vmware.api [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Task: {'id': task-1016817, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.747665] env[63345]: DEBUG oslo_concurrency.lockutils [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.658s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 702.747665] env[63345]: DEBUG nova.compute.manager [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 869f8110-6490-4a47-955a-0ce085f826af] Start building networks asynchronously for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 702.751829] env[63345]: DEBUG oslo_concurrency.lockutils [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.105s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 702.754941] env[63345]: INFO nova.compute.claims [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] [instance: abc81fa5-78a9-48b1-a49e-2faffddf2411] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 702.851136] env[63345]: DEBUG oslo_vmware.api [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Task: {'id': task-1016818, 'name': ReconfigVM_Task, 'duration_secs': 0.306457} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 702.851425] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] [instance: f043239f-7158-4199-a784-d711a5a301be] Reconfigured VM instance instance-00000028 to attach disk [datastore2] f043239f-7158-4199-a784-d711a5a301be/f043239f-7158-4199-a784-d711a5a301be.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 702.852028] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6d8b780e-848a-42db-b46b-ee802d53bdf3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.859209] env[63345]: DEBUG oslo_vmware.api [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Waiting for the task: (returnval){ [ 702.859209] env[63345]: value = "task-1016819" [ 702.859209] env[63345]: _type = "Task" [ 702.859209] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.867251] env[63345]: DEBUG oslo_vmware.api [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Task: {'id': task-1016819, 'name': Rename_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.942999] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Releasing lock "refresh_cache-30755716-03a7-41bd-90c2-7ef21baf9975" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 702.943388] env[63345]: DEBUG nova.compute.manager [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] [instance: 30755716-03a7-41bd-90c2-7ef21baf9975] Instance network_info: |[{"id": "32300854-3281-41f9-8ba4-87a1c457e72c", "address": "fa:16:3e:b2:ca:8b", "network": {"id": "e181c356-7853-4158-96d0-47f099cb900b", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-735360173-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f1dab31b160f43b7897e0eac3d2024a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "88651df2-0506-4f6c-b868-dd30a81f2b1c", "external-id": "nsx-vlan-transportzone-366", "segmentation_id": 366, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap32300854-32", "ovs_interfaceid": "32300854-3281-41f9-8ba4-87a1c457e72c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 702.943908] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] [instance: 30755716-03a7-41bd-90c2-7ef21baf9975] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b2:ca:8b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '88651df2-0506-4f6c-b868-dd30a81f2b1c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '32300854-3281-41f9-8ba4-87a1c457e72c', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 702.951715] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Creating folder: Project (f1dab31b160f43b7897e0eac3d2024a1). Parent ref: group-v225918. 
{{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 702.952008] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7a4b0ac6-154c-4e25-94c0-5539f8a6da48 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.963491] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Created folder: Project (f1dab31b160f43b7897e0eac3d2024a1) in parent group-v225918. [ 702.963831] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Creating folder: Instances. Parent ref: group-v225977. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 702.964198] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8ea60981-c149-4a93-a545-d18eb3082578 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.973411] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Created folder: Instances in parent group-v225977. [ 702.973666] env[63345]: DEBUG oslo.service.loopingcall [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 702.973860] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 30755716-03a7-41bd-90c2-7ef21baf9975] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 702.974077] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dec26284-e9ab-4d15-8200-62574c350afe {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.003360] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 703.003360] env[63345]: value = "task-1016822" [ 703.003360] env[63345]: _type = "Task" [ 703.003360] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 703.010822] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016822, 'name': CreateVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.178013] env[63345]: DEBUG oslo_vmware.api [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Task: {'id': task-1016817, 'name': PowerOnVM_Task, 'duration_secs': 0.796585} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 703.178304] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] [instance: c07c7f5d-a674-458f-8253-1bc2d61be6c1] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 703.178490] env[63345]: INFO nova.compute.manager [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] [instance: c07c7f5d-a674-458f-8253-1bc2d61be6c1] Took 10.77 seconds to spawn the instance on the hypervisor. [ 703.178802] env[63345]: DEBUG nova.compute.manager [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] [instance: c07c7f5d-a674-458f-8253-1bc2d61be6c1] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 703.179594] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-646df010-8a6d-4ea3-9c6e-83f7003078d7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.257540] env[63345]: DEBUG nova.compute.utils [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 703.261519] env[63345]: DEBUG nova.compute.manager [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 869f8110-6490-4a47-955a-0ce085f826af] Allocating IP information in the background. {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 703.261765] env[63345]: DEBUG nova.network.neutron [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 869f8110-6490-4a47-955a-0ce085f826af] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 703.341828] env[63345]: DEBUG nova.policy [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dd618fef89a843209784ca9e925d18eb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cb91ecf5d00e48dea9baf2122ac4fed7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 703.369624] env[63345]: DEBUG oslo_vmware.api [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Task: {'id': task-1016819, 'name': Rename_Task, 'duration_secs': 0.143551} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 703.369893] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] [instance: f043239f-7158-4199-a784-d711a5a301be] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 703.370328] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-59b0c4c6-30c6-4d26-9a4d-d422ddf4a051 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.376781] env[63345]: DEBUG oslo_vmware.api [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Waiting for the task: (returnval){ [ 703.376781] env[63345]: value = "task-1016823" [ 703.376781] env[63345]: _type = "Task" [ 703.376781] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 703.384782] env[63345]: DEBUG oslo_vmware.api [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Task: {'id': task-1016823, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.513167] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016822, 'name': CreateVM_Task} progress is 25%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.535998] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4f4c280-7f82-4aa5-8555-a67079d554bb {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.555549] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Updating instance '27c6dc17-4ded-4fe7-8fba-265eae64fc32' progress to 0 {{(pid=63345) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 703.589819] env[63345]: DEBUG oslo_concurrency.lockutils [None req-81c37da8-fd8e-48cc-a236-dcef4cac536f tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Acquiring lock "3e4e58bd-903b-4b3d-8be4-5678aab6c721" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 703.590138] env[63345]: DEBUG oslo_concurrency.lockutils [None req-81c37da8-fd8e-48cc-a236-dcef4cac536f tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Lock "3e4e58bd-903b-4b3d-8be4-5678aab6c721" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 703.590332] env[63345]: DEBUG nova.compute.manager [None req-81c37da8-fd8e-48cc-a236-dcef4cac536f tempest-ListServerFiltersTestJSON-10594905 
tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 3e4e58bd-903b-4b3d-8be4-5678aab6c721] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 703.591228] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a7b00c4-4e6e-40f1-9a7d-de059284c730 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.597941] env[63345]: DEBUG nova.compute.manager [None req-81c37da8-fd8e-48cc-a236-dcef4cac536f tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 3e4e58bd-903b-4b3d-8be4-5678aab6c721] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63345) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3403}} [ 703.598509] env[63345]: DEBUG nova.objects.instance [None req-81c37da8-fd8e-48cc-a236-dcef4cac536f tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Lazy-loading 'flavor' on Instance uuid 3e4e58bd-903b-4b3d-8be4-5678aab6c721 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 703.698631] env[63345]: INFO nova.compute.manager [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] [instance: c07c7f5d-a674-458f-8253-1bc2d61be6c1] Took 43.17 seconds to build instance. [ 703.765968] env[63345]: DEBUG nova.compute.manager [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 869f8110-6490-4a47-955a-0ce085f826af] Start building block device mappings for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 703.889805] env[63345]: DEBUG oslo_vmware.api [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Task: {'id': task-1016823, 'name': PowerOnVM_Task, 'duration_secs': 0.442258} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 703.889805] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] [instance: f043239f-7158-4199-a784-d711a5a301be] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 703.889938] env[63345]: INFO nova.compute.manager [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] [instance: f043239f-7158-4199-a784-d711a5a301be] Took 8.77 seconds to spawn the instance on the hypervisor. 
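[editor's note] The entries above trace the pattern that recurs throughout this log: the driver invokes VirtualMachine.PowerOnVM_Task, receives a task handle (e.g. task-1016823), polls it ("progress is 0%") until vCenter reports completion with a duration_secs value, and only then logs "Powered on the VM" / "Took N seconds to spawn the instance". The sketch below is a minimal, hypothetical illustration of that poll-until-complete loop; it does not reproduce the oslo.vmware API, and the names FakeVCenter, get_task_info, and TaskInfo are invented for illustration only.

# Hypothetical sketch of the create-task / poll-until-complete pattern seen in
# the log above (PowerOnVM_Task -> "progress is 0%" -> "completed successfully").
# Names are invented; this is NOT the oslo.vmware implementation.
import time
from dataclasses import dataclass


@dataclass
class TaskInfo:
    task_id: str          # e.g. "task-1016823" in the log
    state: str            # "running" | "success" | "error"
    progress: int         # percent complete, as reported by each poll
    duration_secs: float = 0.0


class FakeVCenter:
    """Stands in for the SOAP endpoint; completes any task after a few polls."""

    def __init__(self):
        self._tasks = {}

    def power_on_vm(self, vm_name: str) -> str:
        task_id = f"task-{1016823 + len(self._tasks)}"
        self._tasks[task_id] = {"polls": 0, "started": time.monotonic()}
        print(f"Invoking VirtualMachine.PowerOnVM_Task for {vm_name} -> {task_id}")
        return task_id

    def get_task_info(self, task_id: str) -> TaskInfo:
        entry = self._tasks[task_id]
        entry["polls"] += 1
        done = entry["polls"] >= 3
        return TaskInfo(
            task_id=task_id,
            state="success" if done else "running",
            progress=100 if done else (entry["polls"] - 1) * 33,
            duration_secs=time.monotonic() - entry["started"],
        )


def wait_for_task(vc: FakeVCenter, task_id: str, poll_interval: float = 0.1) -> TaskInfo:
    """Poll until the task leaves the 'running' state, mirroring the
    'Waiting for the task' / 'progress is N%' / 'completed successfully' lines."""
    while True:
        info = vc.get_task_info(task_id)
        if info.state != "running":
            print(f"Task {info.task_id} completed, duration_secs={info.duration_secs:.3f}")
            return info
        print(f"Task {info.task_id}: progress is {info.progress}%")
        time.sleep(poll_interval)


if __name__ == "__main__":
    vc = FakeVCenter()
    task = vc.power_on_vm("f043239f-7158-4199-a784-d711a5a301be")
    result = wait_for_task(vc, task)
    assert result.state == "success"
    print("Powered on the VM")  # the driver logs this only after the task succeeds

The same create/poll/complete sequence appears later in this section for CreateVM_Task, PowerOffVM_Task, ReconfigVM_Task, SearchDatastore_Task, and DeleteDatastoreFile_Task; only the invoked operation differs.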
[ 703.891041] env[63345]: DEBUG nova.compute.manager [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] [instance: f043239f-7158-4199-a784-d711a5a301be] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 703.891866] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b502f925-89a0-48d4-a3ad-9fc4c3c30a79 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.914868] env[63345]: DEBUG nova.compute.manager [req-07cc0527-8046-4e6d-9f5d-d5b5e5d9cbed req-3fd461ce-c4c1-49fc-b34e-4aceca9dd3ff service nova] [instance: 7bef089c-e93b-4ba6-a683-4e076489f92a] Received event network-changed-49047c62-1eed-4563-b10d-31b82cc302ff {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 703.916902] env[63345]: DEBUG nova.compute.manager [req-07cc0527-8046-4e6d-9f5d-d5b5e5d9cbed req-3fd461ce-c4c1-49fc-b34e-4aceca9dd3ff service nova] [instance: 7bef089c-e93b-4ba6-a683-4e076489f92a] Refreshing instance network info cache due to event network-changed-49047c62-1eed-4563-b10d-31b82cc302ff. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 703.916902] env[63345]: DEBUG oslo_concurrency.lockutils [req-07cc0527-8046-4e6d-9f5d-d5b5e5d9cbed req-3fd461ce-c4c1-49fc-b34e-4aceca9dd3ff service nova] Acquiring lock "refresh_cache-7bef089c-e93b-4ba6-a683-4e076489f92a" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 703.916902] env[63345]: DEBUG oslo_concurrency.lockutils [req-07cc0527-8046-4e6d-9f5d-d5b5e5d9cbed req-3fd461ce-c4c1-49fc-b34e-4aceca9dd3ff service nova] Acquired lock "refresh_cache-7bef089c-e93b-4ba6-a683-4e076489f92a" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 703.916902] env[63345]: DEBUG nova.network.neutron [req-07cc0527-8046-4e6d-9f5d-d5b5e5d9cbed req-3fd461ce-c4c1-49fc-b34e-4aceca9dd3ff service nova] [instance: 7bef089c-e93b-4ba6-a683-4e076489f92a] Refreshing network info cache for port 49047c62-1eed-4563-b10d-31b82cc302ff {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 703.964782] env[63345]: DEBUG nova.network.neutron [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 869f8110-6490-4a47-955a-0ce085f826af] Successfully created port: 9b0555db-b627-44ae-8812-42415d554cde {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 704.020645] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016822, 'name': CreateVM_Task, 'duration_secs': 0.693379} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 704.020645] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 30755716-03a7-41bd-90c2-7ef21baf9975] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 704.021133] env[63345]: DEBUG oslo_vmware.service [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8920343c-a49f-446f-a2c4-7b1e558f1ce4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.028863] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 704.029168] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 704.029684] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 704.030160] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d12d79b8-1673-42cd-b3df-c4e962f03235 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.034912] env[63345]: DEBUG oslo_vmware.api [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Waiting for the task: (returnval){ [ 704.034912] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]5232de9d-c9b4-2876-8008-b90ad1000fa1" [ 704.034912] env[63345]: _type = "Task" [ 704.034912] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 704.042819] env[63345]: DEBUG oslo_vmware.api [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5232de9d-c9b4-2876-8008-b90ad1000fa1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.060945] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 704.061534] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bc290503-3db3-4429-aead-388b320f82db {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.070662] env[63345]: DEBUG oslo_vmware.api [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Waiting for the task: (returnval){ [ 704.070662] env[63345]: value = "task-1016824" [ 704.070662] env[63345]: _type = "Task" [ 704.070662] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 704.081745] env[63345]: DEBUG oslo_vmware.api [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Task: {'id': task-1016824, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.202803] env[63345]: DEBUG oslo_concurrency.lockutils [None req-14790605-096b-44fd-8e57-1a9ebe78d916 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Lock "c07c7f5d-a674-458f-8253-1bc2d61be6c1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 132.447s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 704.290890] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6aff9d8-61f7-4715-858f-509e253dbc46 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.301979] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9374b390-1e73-4f41-a6fc-9f1bcea503f5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.338242] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d5fd42e-4bca-4b0d-b88b-8d001d89e3bb {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.347108] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-980ac157-9e32-4849-9cc6-9e1ecfe85a9b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.365041] env[63345]: DEBUG nova.compute.provider_tree [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 704.416558] env[63345]: INFO nova.compute.manager 
[None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] [instance: f043239f-7158-4199-a784-d711a5a301be] Took 39.33 seconds to build instance. [ 704.545852] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 704.546130] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] [instance: 30755716-03a7-41bd-90c2-7ef21baf9975] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 704.546425] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 704.546578] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 704.546776] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 704.547085] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f8642bf0-54d9-4de0-ade8-f164d306f58e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.555820] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 704.556019] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 704.556765] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83778148-c709-41e2-ae4b-d4eea9b17a42 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.563762] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-76d3724d-0e51-4e9b-956d-e06365e85ce7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.568915] env[63345]: DEBUG oslo_vmware.api [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Waiting for the task: (returnval){ [ 704.568915] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52d19c5d-57c4-3595-adab-5def521d811e" [ 704.568915] env[63345]: _type = "Task" [ 704.568915] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 704.581982] env[63345]: DEBUG oslo_vmware.api [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52d19c5d-57c4-3595-adab-5def521d811e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.585467] env[63345]: DEBUG oslo_vmware.api [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Task: {'id': task-1016824, 'name': PowerOffVM_Task, 'duration_secs': 0.296229} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 704.585857] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 704.586091] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Updating instance '27c6dc17-4ded-4fe7-8fba-265eae64fc32' progress to 17 {{(pid=63345) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 704.608617] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-81c37da8-fd8e-48cc-a236-dcef4cac536f tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 3e4e58bd-903b-4b3d-8be4-5678aab6c721] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 704.608910] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9ed5a45c-c27a-415d-8aa0-ff58bcbc827b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.615648] env[63345]: DEBUG oslo_vmware.api [None req-81c37da8-fd8e-48cc-a236-dcef4cac536f tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Waiting for the task: (returnval){ [ 704.615648] env[63345]: value = "task-1016825" [ 704.615648] env[63345]: _type = "Task" [ 704.615648] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 704.624152] env[63345]: DEBUG oslo_vmware.api [None req-81c37da8-fd8e-48cc-a236-dcef4cac536f tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Task: {'id': task-1016825, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.691604] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b39a0d7f-3eb6-4262-995b-85516b6ff111 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Acquiring lock "c07c7f5d-a674-458f-8253-1bc2d61be6c1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 704.691803] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b39a0d7f-3eb6-4262-995b-85516b6ff111 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Lock "c07c7f5d-a674-458f-8253-1bc2d61be6c1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 704.692020] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b39a0d7f-3eb6-4262-995b-85516b6ff111 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Acquiring lock "c07c7f5d-a674-458f-8253-1bc2d61be6c1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 704.692215] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b39a0d7f-3eb6-4262-995b-85516b6ff111 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Lock "c07c7f5d-a674-458f-8253-1bc2d61be6c1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 704.692359] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b39a0d7f-3eb6-4262-995b-85516b6ff111 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Lock "c07c7f5d-a674-458f-8253-1bc2d61be6c1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 704.694362] env[63345]: INFO nova.compute.manager [None req-b39a0d7f-3eb6-4262-995b-85516b6ff111 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] [instance: c07c7f5d-a674-458f-8253-1bc2d61be6c1] Terminating instance [ 704.707189] env[63345]: DEBUG nova.compute.manager [None req-d0b1cd44-5d00-4371-827d-17b8856bf22c tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 704.799304] env[63345]: DEBUG nova.compute.manager [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 869f8110-6490-4a47-955a-0ce085f826af] Start spawning the instance on the hypervisor. 
{{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 704.832182] env[63345]: DEBUG nova.virt.hardware [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 704.832462] env[63345]: DEBUG nova.virt.hardware [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 704.832582] env[63345]: DEBUG nova.virt.hardware [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 704.832762] env[63345]: DEBUG nova.virt.hardware [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 704.832907] env[63345]: DEBUG nova.virt.hardware [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 704.833598] env[63345]: DEBUG nova.virt.hardware [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 704.833820] env[63345]: DEBUG nova.virt.hardware [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 704.833988] env[63345]: DEBUG nova.virt.hardware [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 704.834232] env[63345]: DEBUG nova.virt.hardware [None 
req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 704.834454] env[63345]: DEBUG nova.virt.hardware [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 704.834642] env[63345]: DEBUG nova.virt.hardware [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 704.835614] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0604111a-371c-4d69-ac8c-626d4ada1266 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.847871] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e640f130-269b-4337-8dd6-6f5ec2e673c7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.869119] env[63345]: DEBUG nova.scheduler.client.report [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 704.873171] env[63345]: DEBUG nova.network.neutron [req-07cc0527-8046-4e6d-9f5d-d5b5e5d9cbed req-3fd461ce-c4c1-49fc-b34e-4aceca9dd3ff service nova] [instance: 7bef089c-e93b-4ba6-a683-4e076489f92a] Updated VIF entry in instance network info cache for port 49047c62-1eed-4563-b10d-31b82cc302ff. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 704.873647] env[63345]: DEBUG nova.network.neutron [req-07cc0527-8046-4e6d-9f5d-d5b5e5d9cbed req-3fd461ce-c4c1-49fc-b34e-4aceca9dd3ff service nova] [instance: 7bef089c-e93b-4ba6-a683-4e076489f92a] Updating instance_info_cache with network_info: [{"id": "49047c62-1eed-4563-b10d-31b82cc302ff", "address": "fa:16:3e:e8:a3:50", "network": {"id": "256a54f6-a458-4574-98b2-7ae39111a4e1", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-542895641-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.216", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f1a8f4cca8304eaead620b510eba103f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43798f54-0c07-4417-a23f-58bb6b7e204b", "external-id": "nsx-vlan-transportzone-571", "segmentation_id": 571, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap49047c62-1e", "ovs_interfaceid": "49047c62-1eed-4563-b10d-31b82cc302ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 704.918702] env[63345]: DEBUG oslo_concurrency.lockutils [None req-aee37377-4a9d-4ff1-9b3c-01248e919c58 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Lock "f043239f-7158-4199-a784-d711a5a301be" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 129.364s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 705.081653] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] [instance: 30755716-03a7-41bd-90c2-7ef21baf9975] Preparing fetch location {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 705.082041] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Creating directory with path [datastore1] vmware_temp/d8bb06a9-33b8-4db3-a450-f0780240dc42/2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 705.082394] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-09b9cf50-114e-4ad1-acd1-363bb2111597 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.093700] env[63345]: DEBUG nova.virt.hardware [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2024-09-30T09:32:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 705.093934] env[63345]: DEBUG nova.virt.hardware [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 705.094100] env[63345]: DEBUG nova.virt.hardware [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 705.094284] env[63345]: DEBUG nova.virt.hardware [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 705.094430] env[63345]: DEBUG nova.virt.hardware [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 705.094576] env[63345]: DEBUG nova.virt.hardware [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 705.094772] env[63345]: DEBUG nova.virt.hardware [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 705.094930] env[63345]: DEBUG nova.virt.hardware [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 705.095118] env[63345]: DEBUG nova.virt.hardware [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 705.095280] env[63345]: DEBUG nova.virt.hardware [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 705.095454] env[63345]: DEBUG nova.virt.hardware [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 705.102289] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-565f5df8-9033-48e1-b5cf-8de887e7f7ad {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.114291] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Created directory with path [datastore1] vmware_temp/d8bb06a9-33b8-4db3-a450-f0780240dc42/2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 705.114532] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] [instance: 30755716-03a7-41bd-90c2-7ef21baf9975] Fetch image to [datastore1] vmware_temp/d8bb06a9-33b8-4db3-a450-f0780240dc42/2ff49e1b-8f44-4332-bba9-777d55ff62c4/tmp-sparse.vmdk {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 705.114707] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] [instance: 30755716-03a7-41bd-90c2-7ef21baf9975] Downloading image file data 2ff49e1b-8f44-4332-bba9-777d55ff62c4 to [datastore1] vmware_temp/d8bb06a9-33b8-4db3-a450-f0780240dc42/2ff49e1b-8f44-4332-bba9-777d55ff62c4/tmp-sparse.vmdk on the data store datastore1 {{(pid=63345) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 705.115615] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9a80764-764c-42ff-91af-e59e8340ca4f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.123880] env[63345]: DEBUG oslo_vmware.api [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Waiting for the task: (returnval){ [ 705.123880] env[63345]: value = "task-1016826" [ 705.123880] env[63345]: _type = "Task" [ 705.123880] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 705.132984] env[63345]: DEBUG oslo_vmware.api [None req-81c37da8-fd8e-48cc-a236-dcef4cac536f tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Task: {'id': task-1016825, 'name': PowerOffVM_Task, 'duration_secs': 0.423418} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 705.136760] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e5291c2-3e33-454a-83f3-f9accb837e3f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.139177] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-81c37da8-fd8e-48cc-a236-dcef4cac536f tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 3e4e58bd-903b-4b3d-8be4-5678aab6c721] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 705.139390] env[63345]: DEBUG nova.compute.manager [None req-81c37da8-fd8e-48cc-a236-dcef4cac536f tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 3e4e58bd-903b-4b3d-8be4-5678aab6c721] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 705.140128] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d86f68b6-fa30-4202-ae6e-a6cd39c78e52 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.155070] env[63345]: DEBUG oslo_vmware.api [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Task: {'id': task-1016826, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.159663] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc168254-8104-4c2b-9459-be28a2338318 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.203256] env[63345]: DEBUG nova.compute.manager [None req-b39a0d7f-3eb6-4262-995b-85516b6ff111 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] [instance: c07c7f5d-a674-458f-8253-1bc2d61be6c1] Start destroying the instance on the hypervisor. 
{{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 705.203606] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-b39a0d7f-3eb6-4262-995b-85516b6ff111 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] [instance: c07c7f5d-a674-458f-8253-1bc2d61be6c1] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 705.205420] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6304cbba-6665-4f5e-852f-b088db50642b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.209072] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8caf93b5-0520-494a-97af-c6f3648d3a81 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.221772] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-b39a0d7f-3eb6-4262-995b-85516b6ff111 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] [instance: c07c7f5d-a674-458f-8253-1bc2d61be6c1] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 705.222283] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0afcb6e1-2723-420d-a8b1-c872330d99d4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.223651] env[63345]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-54918ef3-492b-4d1e-8574-ddd1a0547661 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.230307] env[63345]: DEBUG oslo_vmware.api [None req-b39a0d7f-3eb6-4262-995b-85516b6ff111 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Waiting for the task: (returnval){ [ 705.230307] env[63345]: value = "task-1016827" [ 705.230307] env[63345]: _type = "Task" [ 705.230307] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 705.234208] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d0b1cd44-5d00-4371-827d-17b8856bf22c tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 705.239511] env[63345]: DEBUG oslo_vmware.api [None req-b39a0d7f-3eb6-4262-995b-85516b6ff111 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Task: {'id': task-1016827, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.248688] env[63345]: DEBUG nova.virt.vmwareapi.images [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] [instance: 30755716-03a7-41bd-90c2-7ef21baf9975] Downloading image file data 2ff49e1b-8f44-4332-bba9-777d55ff62c4 to the data store datastore1 {{(pid=63345) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 705.300251] env[63345]: DEBUG oslo_vmware.rw_handles [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d8bb06a9-33b8-4db3-a450-f0780240dc42/2ff49e1b-8f44-4332-bba9-777d55ff62c4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=63345) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 705.376442] env[63345]: DEBUG oslo_concurrency.lockutils [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.625s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 705.376978] env[63345]: DEBUG nova.compute.manager [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] [instance: abc81fa5-78a9-48b1-a49e-2faffddf2411] Start building networks asynchronously for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 705.380066] env[63345]: DEBUG oslo_concurrency.lockutils [req-07cc0527-8046-4e6d-9f5d-d5b5e5d9cbed req-3fd461ce-c4c1-49fc-b34e-4aceca9dd3ff service nova] Releasing lock "refresh_cache-7bef089c-e93b-4ba6-a683-4e076489f92a" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 705.380336] env[63345]: DEBUG nova.compute.manager [req-07cc0527-8046-4e6d-9f5d-d5b5e5d9cbed req-3fd461ce-c4c1-49fc-b34e-4aceca9dd3ff service nova] [instance: 30755716-03a7-41bd-90c2-7ef21baf9975] Received event network-changed-32300854-3281-41f9-8ba4-87a1c457e72c {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 705.380515] env[63345]: DEBUG nova.compute.manager [req-07cc0527-8046-4e6d-9f5d-d5b5e5d9cbed req-3fd461ce-c4c1-49fc-b34e-4aceca9dd3ff service nova] [instance: 30755716-03a7-41bd-90c2-7ef21baf9975] Refreshing instance network info cache due to event network-changed-32300854-3281-41f9-8ba4-87a1c457e72c. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 705.380725] env[63345]: DEBUG oslo_concurrency.lockutils [req-07cc0527-8046-4e6d-9f5d-d5b5e5d9cbed req-3fd461ce-c4c1-49fc-b34e-4aceca9dd3ff service nova] Acquiring lock "refresh_cache-30755716-03a7-41bd-90c2-7ef21baf9975" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 705.380868] env[63345]: DEBUG oslo_concurrency.lockutils [req-07cc0527-8046-4e6d-9f5d-d5b5e5d9cbed req-3fd461ce-c4c1-49fc-b34e-4aceca9dd3ff service nova] Acquired lock "refresh_cache-30755716-03a7-41bd-90c2-7ef21baf9975" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 705.381056] env[63345]: DEBUG nova.network.neutron [req-07cc0527-8046-4e6d-9f5d-d5b5e5d9cbed req-3fd461ce-c4c1-49fc-b34e-4aceca9dd3ff service nova] [instance: 30755716-03a7-41bd-90c2-7ef21baf9975] Refreshing network info cache for port 32300854-3281-41f9-8ba4-87a1c457e72c {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 705.382889] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a2551010-1e6c-414f-b6b5-e43b698e5890 tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.282s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 705.383154] env[63345]: DEBUG nova.objects.instance [None req-a2551010-1e6c-414f-b6b5-e43b698e5890 tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Lazy-loading 'resources' on Instance uuid 4d41f4a7-4fde-4d34-be7c-533c00fe5ae6 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 705.420974] env[63345]: DEBUG nova.compute.manager [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: 4a59b565-571f-48ef-97bd-bed9853e2d8e] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 705.642980] env[63345]: DEBUG oslo_vmware.api [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Task: {'id': task-1016826, 'name': ReconfigVM_Task, 'duration_secs': 0.415149} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 705.643534] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Updating instance '27c6dc17-4ded-4fe7-8fba-265eae64fc32' progress to 33 {{(pid=63345) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 705.674289] env[63345]: DEBUG oslo_concurrency.lockutils [None req-81c37da8-fd8e-48cc-a236-dcef4cac536f tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Lock "3e4e58bd-903b-4b3d-8be4-5678aab6c721" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.084s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 705.743937] env[63345]: DEBUG oslo_vmware.api [None req-b39a0d7f-3eb6-4262-995b-85516b6ff111 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Task: {'id': task-1016827, 'name': PowerOffVM_Task, 'duration_secs': 0.26675} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 705.744743] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-b39a0d7f-3eb6-4262-995b-85516b6ff111 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] [instance: c07c7f5d-a674-458f-8253-1bc2d61be6c1] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 705.745008] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-b39a0d7f-3eb6-4262-995b-85516b6ff111 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] [instance: c07c7f5d-a674-458f-8253-1bc2d61be6c1] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 705.745352] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5187439b-4d86-4cac-96f6-cee6656853b8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.826329] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-b39a0d7f-3eb6-4262-995b-85516b6ff111 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] [instance: c07c7f5d-a674-458f-8253-1bc2d61be6c1] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 705.826574] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-b39a0d7f-3eb6-4262-995b-85516b6ff111 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] [instance: c07c7f5d-a674-458f-8253-1bc2d61be6c1] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 705.826705] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-b39a0d7f-3eb6-4262-995b-85516b6ff111 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Deleting the datastore file [datastore2] c07c7f5d-a674-458f-8253-1bc2d61be6c1 {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 
705.826947] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b9ea0686-5a75-4ae9-a3cd-eb966e7634ac {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.833681] env[63345]: DEBUG oslo_vmware.api [None req-b39a0d7f-3eb6-4262-995b-85516b6ff111 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Waiting for the task: (returnval){ [ 705.833681] env[63345]: value = "task-1016829" [ 705.833681] env[63345]: _type = "Task" [ 705.833681] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 705.842179] env[63345]: DEBUG oslo_vmware.api [None req-b39a0d7f-3eb6-4262-995b-85516b6ff111 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Task: {'id': task-1016829, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.884804] env[63345]: DEBUG nova.compute.utils [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 705.888387] env[63345]: DEBUG nova.compute.manager [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] [instance: abc81fa5-78a9-48b1-a49e-2faffddf2411] Allocating IP information in the background. {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 705.888588] env[63345]: DEBUG nova.network.neutron [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] [instance: abc81fa5-78a9-48b1-a49e-2faffddf2411] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 705.943704] env[63345]: DEBUG nova.policy [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '82fd4c0af10549709f6a8f49fd8840f1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b367b9e5b9f945b6aad6b9b5f9900e8c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 705.950332] env[63345]: DEBUG oslo_concurrency.lockutils [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 706.046044] env[63345]: DEBUG oslo_vmware.rw_handles [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Completed reading data from the 
image iterator. {{(pid=63345) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 706.046518] env[63345]: DEBUG oslo_vmware.rw_handles [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d8bb06a9-33b8-4db3-a450-f0780240dc42/2ff49e1b-8f44-4332-bba9-777d55ff62c4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=63345) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 706.126715] env[63345]: DEBUG nova.compute.manager [req-7ee041f4-e7d8-4ede-ad8b-3f9ac9fe5876 req-2037ff20-88a5-46ec-b1bd-4ace6399d366 service nova] [instance: f043239f-7158-4199-a784-d711a5a301be] Received event network-changed-a8153077-1984-4619-ae74-08c5902cfff8 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 706.126715] env[63345]: DEBUG nova.compute.manager [req-7ee041f4-e7d8-4ede-ad8b-3f9ac9fe5876 req-2037ff20-88a5-46ec-b1bd-4ace6399d366 service nova] [instance: f043239f-7158-4199-a784-d711a5a301be] Refreshing instance network info cache due to event network-changed-a8153077-1984-4619-ae74-08c5902cfff8. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 706.127249] env[63345]: DEBUG oslo_concurrency.lockutils [req-7ee041f4-e7d8-4ede-ad8b-3f9ac9fe5876 req-2037ff20-88a5-46ec-b1bd-4ace6399d366 service nova] Acquiring lock "refresh_cache-f043239f-7158-4199-a784-d711a5a301be" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 706.127249] env[63345]: DEBUG oslo_concurrency.lockutils [req-7ee041f4-e7d8-4ede-ad8b-3f9ac9fe5876 req-2037ff20-88a5-46ec-b1bd-4ace6399d366 service nova] Acquired lock "refresh_cache-f043239f-7158-4199-a784-d711a5a301be" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 706.127438] env[63345]: DEBUG nova.network.neutron [req-7ee041f4-e7d8-4ede-ad8b-3f9ac9fe5876 req-2037ff20-88a5-46ec-b1bd-4ace6399d366 service nova] [instance: f043239f-7158-4199-a784-d711a5a301be] Refreshing network info cache for port a8153077-1984-4619-ae74-08c5902cfff8 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 706.153500] env[63345]: DEBUG nova.virt.hardware [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='e0f196a9-2434-4e97-8d5f-115ba2c65179',id=36,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-506807631',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 706.153760] env[63345]: DEBUG nova.virt.hardware [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 
tempest-MigrationsAdminTest-1586795887-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 706.153927] env[63345]: DEBUG nova.virt.hardware [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 706.155775] env[63345]: DEBUG nova.virt.hardware [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 706.155775] env[63345]: DEBUG nova.virt.hardware [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 706.155775] env[63345]: DEBUG nova.virt.hardware [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 706.155775] env[63345]: DEBUG nova.virt.hardware [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 706.156180] env[63345]: DEBUG nova.virt.hardware [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 706.156180] env[63345]: DEBUG nova.virt.hardware [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 706.156267] env[63345]: DEBUG nova.virt.hardware [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 706.156452] env[63345]: DEBUG nova.virt.hardware [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 706.162386] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Reconfiguring VM instance instance-0000001f to detach disk 2000 {{(pid=63345) detach_disk_from_vm 
/opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 706.168977] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-99806a28-be98-41bf-b87b-fdd231e2888e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.191778] env[63345]: DEBUG oslo_vmware.api [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Waiting for the task: (returnval){ [ 706.191778] env[63345]: value = "task-1016830" [ 706.191778] env[63345]: _type = "Task" [ 706.191778] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.205415] env[63345]: DEBUG oslo_vmware.api [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Task: {'id': task-1016830, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.206447] env[63345]: DEBUG nova.virt.vmwareapi.images [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] [instance: 30755716-03a7-41bd-90c2-7ef21baf9975] Downloaded image file data 2ff49e1b-8f44-4332-bba9-777d55ff62c4 to vmware_temp/d8bb06a9-33b8-4db3-a450-f0780240dc42/2ff49e1b-8f44-4332-bba9-777d55ff62c4/tmp-sparse.vmdk on the data store datastore1 {{(pid=63345) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 706.208987] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] [instance: 30755716-03a7-41bd-90c2-7ef21baf9975] Caching image {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 706.209296] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Copying Virtual Disk [datastore1] vmware_temp/d8bb06a9-33b8-4db3-a450-f0780240dc42/2ff49e1b-8f44-4332-bba9-777d55ff62c4/tmp-sparse.vmdk to [datastore1] vmware_temp/d8bb06a9-33b8-4db3-a450-f0780240dc42/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 706.213138] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-66e493f1-8d16-4cfd-a603-dfb3f5acdd0e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.221449] env[63345]: DEBUG oslo_vmware.api [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Waiting for the task: (returnval){ [ 706.221449] env[63345]: value = "task-1016831" [ 706.221449] env[63345]: _type = "Task" [ 706.221449] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.230264] env[63345]: DEBUG oslo_vmware.api [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Task: {'id': task-1016831, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.258662] env[63345]: DEBUG nova.network.neutron [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 869f8110-6490-4a47-955a-0ce085f826af] Successfully updated port: 9b0555db-b627-44ae-8812-42415d554cde {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 706.354569] env[63345]: DEBUG oslo_vmware.api [None req-b39a0d7f-3eb6-4262-995b-85516b6ff111 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Task: {'id': task-1016829, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.218782} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 706.354825] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-b39a0d7f-3eb6-4262-995b-85516b6ff111 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 706.355009] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-b39a0d7f-3eb6-4262-995b-85516b6ff111 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] [instance: c07c7f5d-a674-458f-8253-1bc2d61be6c1] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 706.355191] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-b39a0d7f-3eb6-4262-995b-85516b6ff111 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] [instance: c07c7f5d-a674-458f-8253-1bc2d61be6c1] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 706.355361] env[63345]: INFO nova.compute.manager [None req-b39a0d7f-3eb6-4262-995b-85516b6ff111 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] [instance: c07c7f5d-a674-458f-8253-1bc2d61be6c1] Took 1.15 seconds to destroy the instance on the hypervisor. [ 706.355595] env[63345]: DEBUG oslo.service.loopingcall [None req-b39a0d7f-3eb6-4262-995b-85516b6ff111 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 706.355787] env[63345]: DEBUG nova.compute.manager [-] [instance: c07c7f5d-a674-458f-8253-1bc2d61be6c1] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 706.355932] env[63345]: DEBUG nova.network.neutron [-] [instance: c07c7f5d-a674-458f-8253-1bc2d61be6c1] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 706.366782] env[63345]: DEBUG nova.compute.manager [req-d9b904a9-4157-4e50-87be-abe49d92efb5 req-31ce89ed-f262-471b-b287-5ef0782e1f38 service nova] [instance: 869f8110-6490-4a47-955a-0ce085f826af] Received event network-vif-plugged-9b0555db-b627-44ae-8812-42415d554cde {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 706.366782] env[63345]: DEBUG oslo_concurrency.lockutils [req-d9b904a9-4157-4e50-87be-abe49d92efb5 req-31ce89ed-f262-471b-b287-5ef0782e1f38 service nova] Acquiring lock "869f8110-6490-4a47-955a-0ce085f826af-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 706.366782] env[63345]: DEBUG oslo_concurrency.lockutils [req-d9b904a9-4157-4e50-87be-abe49d92efb5 req-31ce89ed-f262-471b-b287-5ef0782e1f38 service nova] Lock "869f8110-6490-4a47-955a-0ce085f826af-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 706.366782] env[63345]: DEBUG oslo_concurrency.lockutils [req-d9b904a9-4157-4e50-87be-abe49d92efb5 req-31ce89ed-f262-471b-b287-5ef0782e1f38 service nova] Lock "869f8110-6490-4a47-955a-0ce085f826af-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 706.366782] env[63345]: DEBUG nova.compute.manager [req-d9b904a9-4157-4e50-87be-abe49d92efb5 req-31ce89ed-f262-471b-b287-5ef0782e1f38 service nova] [instance: 869f8110-6490-4a47-955a-0ce085f826af] No waiting events found dispatching network-vif-plugged-9b0555db-b627-44ae-8812-42415d554cde {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 706.367109] env[63345]: WARNING nova.compute.manager [req-d9b904a9-4157-4e50-87be-abe49d92efb5 req-31ce89ed-f262-471b-b287-5ef0782e1f38 service nova] [instance: 869f8110-6490-4a47-955a-0ce085f826af] Received unexpected event network-vif-plugged-9b0555db-b627-44ae-8812-42415d554cde for instance with vm_state building and task_state spawning. [ 706.391077] env[63345]: DEBUG nova.compute.manager [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] [instance: abc81fa5-78a9-48b1-a49e-2faffddf2411] Start building block device mappings for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 706.400787] env[63345]: DEBUG nova.network.neutron [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] [instance: abc81fa5-78a9-48b1-a49e-2faffddf2411] Successfully created port: a41f3ab6-3d63-4295-a7d4-ab01b18deef6 {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 706.457979] env[63345]: DEBUG nova.network.neutron [req-07cc0527-8046-4e6d-9f5d-d5b5e5d9cbed req-3fd461ce-c4c1-49fc-b34e-4aceca9dd3ff service nova] [instance: 30755716-03a7-41bd-90c2-7ef21baf9975] Updated VIF entry in instance network info cache for port 32300854-3281-41f9-8ba4-87a1c457e72c. {{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 706.458365] env[63345]: DEBUG nova.network.neutron [req-07cc0527-8046-4e6d-9f5d-d5b5e5d9cbed req-3fd461ce-c4c1-49fc-b34e-4aceca9dd3ff service nova] [instance: 30755716-03a7-41bd-90c2-7ef21baf9975] Updating instance_info_cache with network_info: [{"id": "32300854-3281-41f9-8ba4-87a1c457e72c", "address": "fa:16:3e:b2:ca:8b", "network": {"id": "e181c356-7853-4158-96d0-47f099cb900b", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-735360173-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f1dab31b160f43b7897e0eac3d2024a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "88651df2-0506-4f6c-b868-dd30a81f2b1c", "external-id": "nsx-vlan-transportzone-366", "segmentation_id": 366, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap32300854-32", "ovs_interfaceid": "32300854-3281-41f9-8ba4-87a1c457e72c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 706.570517] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb324971-1719-466e-9f92-bac95fda5122 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.578687] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88f26747-a915-4add-af63-0097c1a63412 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.616882] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc857627-f470-454d-b0f3-17338447650d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.626548] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37f7754e-f751-4ee2-bb5b-a8c36a7c83f0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.642441] env[63345]: DEBUG nova.compute.provider_tree [None 
req-a2551010-1e6c-414f-b6b5-e43b698e5890 tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 706.704097] env[63345]: DEBUG oslo_vmware.api [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Task: {'id': task-1016830, 'name': ReconfigVM_Task, 'duration_secs': 0.176835} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 706.704097] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Reconfigured VM instance instance-0000001f to detach disk 2000 {{(pid=63345) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 706.704707] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd08d57e-c553-4983-b944-54b3703c1b53 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.728735] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Reconfiguring VM instance instance-0000001f to attach disk [datastore2] 27c6dc17-4ded-4fe7-8fba-265eae64fc32/27c6dc17-4ded-4fe7-8fba-265eae64fc32.vmdk or device None with type thin {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 706.729227] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ad5867c9-4625-4e12-b583-59150d5b3942 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.754024] env[63345]: DEBUG oslo_vmware.api [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Task: {'id': task-1016831, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.754024] env[63345]: DEBUG oslo_vmware.api [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Waiting for the task: (returnval){ [ 706.754024] env[63345]: value = "task-1016832" [ 706.754024] env[63345]: _type = "Task" [ 706.754024] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.765760] env[63345]: DEBUG oslo_concurrency.lockutils [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquiring lock "refresh_cache-869f8110-6490-4a47-955a-0ce085f826af" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 706.765760] env[63345]: DEBUG oslo_concurrency.lockutils [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquired lock "refresh_cache-869f8110-6490-4a47-955a-0ce085f826af" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 706.765760] env[63345]: DEBUG nova.network.neutron [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 869f8110-6490-4a47-955a-0ce085f826af] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 706.765760] env[63345]: DEBUG oslo_vmware.api [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Task: {'id': task-1016832, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.965102] env[63345]: DEBUG oslo_concurrency.lockutils [req-07cc0527-8046-4e6d-9f5d-d5b5e5d9cbed req-3fd461ce-c4c1-49fc-b34e-4aceca9dd3ff service nova] Releasing lock "refresh_cache-30755716-03a7-41bd-90c2-7ef21baf9975" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 707.112770] env[63345]: DEBUG nova.network.neutron [req-7ee041f4-e7d8-4ede-ad8b-3f9ac9fe5876 req-2037ff20-88a5-46ec-b1bd-4ace6399d366 service nova] [instance: f043239f-7158-4199-a784-d711a5a301be] Updated VIF entry in instance network info cache for port a8153077-1984-4619-ae74-08c5902cfff8. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 707.113160] env[63345]: DEBUG nova.network.neutron [req-7ee041f4-e7d8-4ede-ad8b-3f9ac9fe5876 req-2037ff20-88a5-46ec-b1bd-4ace6399d366 service nova] [instance: f043239f-7158-4199-a784-d711a5a301be] Updating instance_info_cache with network_info: [{"id": "a8153077-1984-4619-ae74-08c5902cfff8", "address": "fa:16:3e:85:4a:56", "network": {"id": "2353863a-5385-41e5-b8e5-a376182d8801", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-647406981-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.166", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "705d13cb3bd04eebbfccb1353523d6a4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5c7821ea-f92f-4f06-a4cb-05e1186a9d22", "external-id": "nsx-vlan-transportzone-69", "segmentation_id": 69, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa8153077-19", "ovs_interfaceid": "a8153077-1984-4619-ae74-08c5902cfff8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 707.145186] env[63345]: DEBUG nova.scheduler.client.report [None req-a2551010-1e6c-414f-b6b5-e43b698e5890 tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 707.239727] env[63345]: DEBUG oslo_vmware.api [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Task: {'id': task-1016831, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.969158} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.240008] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Copied Virtual Disk [datastore1] vmware_temp/d8bb06a9-33b8-4db3-a450-f0780240dc42/2ff49e1b-8f44-4332-bba9-777d55ff62c4/tmp-sparse.vmdk to [datastore1] vmware_temp/d8bb06a9-33b8-4db3-a450-f0780240dc42/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 707.240245] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Deleting the datastore file [datastore1] vmware_temp/d8bb06a9-33b8-4db3-a450-f0780240dc42/2ff49e1b-8f44-4332-bba9-777d55ff62c4/tmp-sparse.vmdk {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 707.240526] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9bbceaf3-ff90-4691-9b73-7d44c9ad6a8c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.247780] env[63345]: DEBUG oslo_vmware.api [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Waiting for the task: (returnval){ [ 707.247780] env[63345]: value = "task-1016833" [ 707.247780] env[63345]: _type = "Task" [ 707.247780] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.256398] env[63345]: DEBUG oslo_vmware.api [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Task: {'id': task-1016833, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.265192] env[63345]: DEBUG oslo_vmware.api [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Task: {'id': task-1016832, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.292776] env[63345]: DEBUG nova.network.neutron [-] [instance: c07c7f5d-a674-458f-8253-1bc2d61be6c1] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 707.316613] env[63345]: DEBUG nova.network.neutron [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 869f8110-6490-4a47-955a-0ce085f826af] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 707.402636] env[63345]: DEBUG nova.compute.manager [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] [instance: abc81fa5-78a9-48b1-a49e-2faffddf2411] Start spawning the instance on the hypervisor. 
{{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 707.428477] env[63345]: DEBUG nova.virt.hardware [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 707.428743] env[63345]: DEBUG nova.virt.hardware [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 707.428900] env[63345]: DEBUG nova.virt.hardware [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 707.429090] env[63345]: DEBUG nova.virt.hardware [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 707.429242] env[63345]: DEBUG nova.virt.hardware [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 707.429390] env[63345]: DEBUG nova.virt.hardware [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 707.429599] env[63345]: DEBUG nova.virt.hardware [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 707.429756] env[63345]: DEBUG nova.virt.hardware [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 707.429922] env[63345]: DEBUG nova.virt.hardware [None 
req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 707.430117] env[63345]: DEBUG nova.virt.hardware [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 707.430320] env[63345]: DEBUG nova.virt.hardware [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 707.431282] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b67adc97-ee84-4a88-a284-30dca4d4684e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.440187] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7b6d32e-690e-40a6-aa88-289ce52acc26 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.458340] env[63345]: DEBUG nova.objects.instance [None req-4b034b9f-9474-42d8-872b-53dcbafc0dd2 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Lazy-loading 'flavor' on Instance uuid 3e4e58bd-903b-4b3d-8be4-5678aab6c721 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 707.495291] env[63345]: DEBUG nova.network.neutron [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 869f8110-6490-4a47-955a-0ce085f826af] Updating instance_info_cache with network_info: [{"id": "9b0555db-b627-44ae-8812-42415d554cde", "address": "fa:16:3e:4c:4f:70", "network": {"id": "18b67684-3f06-4f15-be40-ba0b2769b248", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1680877425-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cb91ecf5d00e48dea9baf2122ac4fed7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "68add7d6-c025-46fa-84d3-9c589adb63e4", "external-id": "nsx-vlan-transportzone-961", "segmentation_id": 961, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9b0555db-b6", "ovs_interfaceid": "9b0555db-b627-44ae-8812-42415d554cde", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 707.615788] env[63345]: DEBUG oslo_concurrency.lockutils [req-7ee041f4-e7d8-4ede-ad8b-3f9ac9fe5876 
req-2037ff20-88a5-46ec-b1bd-4ace6399d366 service nova] Releasing lock "refresh_cache-f043239f-7158-4199-a784-d711a5a301be" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 707.653462] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a2551010-1e6c-414f-b6b5-e43b698e5890 tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.270s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 707.655802] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.105s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 707.657292] env[63345]: INFO nova.compute.claims [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 707.679054] env[63345]: INFO nova.scheduler.client.report [None req-a2551010-1e6c-414f-b6b5-e43b698e5890 tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Deleted allocations for instance 4d41f4a7-4fde-4d34-be7c-533c00fe5ae6 [ 707.765501] env[63345]: DEBUG oslo_vmware.api [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Task: {'id': task-1016833, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.08745} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.765898] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 707.766321] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Moving file from [datastore1] vmware_temp/d8bb06a9-33b8-4db3-a450-f0780240dc42/2ff49e1b-8f44-4332-bba9-777d55ff62c4 to [datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4. {{(pid=63345) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 707.767091] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-37906930-b0b9-457b-acfa-b5995efb111a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.774354] env[63345]: DEBUG oslo_vmware.api [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Task: {'id': task-1016832, 'name': ReconfigVM_Task, 'duration_secs': 0.86285} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.775171] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Reconfigured VM instance instance-0000001f to attach disk [datastore2] 27c6dc17-4ded-4fe7-8fba-265eae64fc32/27c6dc17-4ded-4fe7-8fba-265eae64fc32.vmdk or device None with type thin {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 707.775577] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Updating instance '27c6dc17-4ded-4fe7-8fba-265eae64fc32' progress to 50 {{(pid=63345) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 707.783270] env[63345]: DEBUG oslo_vmware.api [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Waiting for the task: (returnval){ [ 707.783270] env[63345]: value = "task-1016834" [ 707.783270] env[63345]: _type = "Task" [ 707.783270] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.791413] env[63345]: DEBUG oslo_vmware.api [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Task: {'id': task-1016834, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.797994] env[63345]: INFO nova.compute.manager [-] [instance: c07c7f5d-a674-458f-8253-1bc2d61be6c1] Took 1.44 seconds to deallocate network for instance. 
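The entries above repeatedly show the same vCenter task pattern: an operation is invoked (CopyVirtualDisk_Task, ReconfigVM_Task, DeleteDatastoreFile_Task, MoveDatastoreFile_Task), then "Waiting for the task" / "_poll_task" lines report "progress is N%" until the task is "completed successfully" with a duration. Purely as an illustration of that polling loop, and not the actual oslo.vmware implementation, here is a minimal self-contained sketch; FakeTask, poll() and wait_for_task() are hypothetical stand-ins introduced only for this example.

```python
import time


class FakeTask:
    """Hypothetical stand-in for a vCenter task handle; finishes after two polls."""

    def __init__(self, task_id):
        self.id = task_id
        self._polls = 0

    def poll(self):
        # Each poll advances progress; a real driver would query the task state remotely.
        self._polls += 1
        progress = min(self._polls * 50, 100)
        state = "success" if progress == 100 else "running"
        return {"id": self.id, "progress": progress, "state": state}


def wait_for_task(task, interval=0.5, timeout=60):
    """Poll a task until it reaches a terminal state, mirroring the
    'Task: ... progress is N%' / 'completed successfully' lines in the log."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        status = task.poll()
        print("Task: %s progress is %d%%" % (status["id"], status["progress"]))
        if status["state"] == "success":
            return status
        if status["state"] == "error":
            raise RuntimeError("task %s failed" % status["id"])
        time.sleep(interval)
    raise TimeoutError("task %s did not complete within %ss" % (task.id, timeout))


if __name__ == "__main__":
    # Example run: prints 50% then 100% and returns, roughly like the log's poll cycle.
    wait_for_task(FakeTask("task-1016834"), interval=0.1)
```

The periodic-poll-with-timeout shape is the point here; the real driver layers the same idea on a looping call plus the vSphere API rather than a local loop.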
[ 707.910668] env[63345]: DEBUG nova.network.neutron [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] [instance: abc81fa5-78a9-48b1-a49e-2faffddf2411] Successfully updated port: a41f3ab6-3d63-4295-a7d4-ab01b18deef6 {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 707.962769] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4b034b9f-9474-42d8-872b-53dcbafc0dd2 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Acquiring lock "refresh_cache-3e4e58bd-903b-4b3d-8be4-5678aab6c721" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 707.963158] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4b034b9f-9474-42d8-872b-53dcbafc0dd2 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Acquired lock "refresh_cache-3e4e58bd-903b-4b3d-8be4-5678aab6c721" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 707.963158] env[63345]: DEBUG nova.network.neutron [None req-4b034b9f-9474-42d8-872b-53dcbafc0dd2 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 3e4e58bd-903b-4b3d-8be4-5678aab6c721] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 707.963312] env[63345]: DEBUG nova.objects.instance [None req-4b034b9f-9474-42d8-872b-53dcbafc0dd2 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Lazy-loading 'info_cache' on Instance uuid 3e4e58bd-903b-4b3d-8be4-5678aab6c721 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 707.997480] env[63345]: DEBUG oslo_concurrency.lockutils [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Releasing lock "refresh_cache-869f8110-6490-4a47-955a-0ce085f826af" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 707.997783] env[63345]: DEBUG nova.compute.manager [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 869f8110-6490-4a47-955a-0ce085f826af] Instance network_info: |[{"id": "9b0555db-b627-44ae-8812-42415d554cde", "address": "fa:16:3e:4c:4f:70", "network": {"id": "18b67684-3f06-4f15-be40-ba0b2769b248", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1680877425-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cb91ecf5d00e48dea9baf2122ac4fed7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "68add7d6-c025-46fa-84d3-9c589adb63e4", "external-id": "nsx-vlan-transportzone-961", "segmentation_id": 961, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9b0555db-b6", "ovs_interfaceid": "9b0555db-b627-44ae-8812-42415d554cde", 
"qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 707.998246] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 869f8110-6490-4a47-955a-0ce085f826af] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4c:4f:70', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '68add7d6-c025-46fa-84d3-9c589adb63e4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9b0555db-b627-44ae-8812-42415d554cde', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 708.005998] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Creating folder: Project (cb91ecf5d00e48dea9baf2122ac4fed7). Parent ref: group-v225918. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 708.006555] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f38bf616-2117-43a2-8bde-61a7d55de905 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.017281] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Created folder: Project (cb91ecf5d00e48dea9baf2122ac4fed7) in parent group-v225918. [ 708.017444] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Creating folder: Instances. Parent ref: group-v225980. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 708.017656] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0531a8a5-e0d5-46ec-9fd9-6e1217df791d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.026286] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Created folder: Instances in parent group-v225980. [ 708.026509] env[63345]: DEBUG oslo.service.loopingcall [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 708.026684] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 869f8110-6490-4a47-955a-0ce085f826af] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 708.026866] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-048edb57-7ca7-4104-956e-14f16a8aa473 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.045170] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 708.045170] env[63345]: value = "task-1016837" [ 708.045170] env[63345]: _type = "Task" [ 708.045170] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.052601] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016837, 'name': CreateVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.169511] env[63345]: DEBUG nova.compute.manager [req-b2247e81-6738-4a58-83a0-29e2685d58ba req-6f136c78-696e-438a-8575-b78ea72038e3 service nova] [instance: abc81fa5-78a9-48b1-a49e-2faffddf2411] Received event network-vif-plugged-a41f3ab6-3d63-4295-a7d4-ab01b18deef6 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 708.169691] env[63345]: DEBUG oslo_concurrency.lockutils [req-b2247e81-6738-4a58-83a0-29e2685d58ba req-6f136c78-696e-438a-8575-b78ea72038e3 service nova] Acquiring lock "abc81fa5-78a9-48b1-a49e-2faffddf2411-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 708.171293] env[63345]: DEBUG oslo_concurrency.lockutils [req-b2247e81-6738-4a58-83a0-29e2685d58ba req-6f136c78-696e-438a-8575-b78ea72038e3 service nova] Lock "abc81fa5-78a9-48b1-a49e-2faffddf2411-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 708.171293] env[63345]: DEBUG oslo_concurrency.lockutils [req-b2247e81-6738-4a58-83a0-29e2685d58ba req-6f136c78-696e-438a-8575-b78ea72038e3 service nova] Lock "abc81fa5-78a9-48b1-a49e-2faffddf2411-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 708.171293] env[63345]: DEBUG nova.compute.manager [req-b2247e81-6738-4a58-83a0-29e2685d58ba req-6f136c78-696e-438a-8575-b78ea72038e3 service nova] [instance: abc81fa5-78a9-48b1-a49e-2faffddf2411] No waiting events found dispatching network-vif-plugged-a41f3ab6-3d63-4295-a7d4-ab01b18deef6 {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 708.172273] env[63345]: WARNING nova.compute.manager [req-b2247e81-6738-4a58-83a0-29e2685d58ba req-6f136c78-696e-438a-8575-b78ea72038e3 service nova] [instance: abc81fa5-78a9-48b1-a49e-2faffddf2411] Received unexpected event network-vif-plugged-a41f3ab6-3d63-4295-a7d4-ab01b18deef6 for instance with vm_state building and task_state spawning. 
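In the entries just above, Neutron's network-vif-plugged notifications arrive while per-instance "<uuid>-events" locks are taken around pop_instance_event, and because no waiter is registered the manager logs "No waiting events found dispatching" followed by the WARNING about an unexpected event for an instance still in vm_state building. As a rough sketch of that waiter/dispatch bookkeeping only (not Nova's actual implementation), the following assumes a hypothetical InstanceEvents class and handle_external_event helper:

```python
import threading


class InstanceEvents:
    """Minimal sketch: track which external events an instance is waiting for."""

    def __init__(self):
        self._events = {}            # instance_uuid -> {event_name: threading.Event}
        self._lock = threading.Lock()

    def prepare_for_event(self, instance_uuid, event_name):
        """Register interest in an event before it can arrive; returns a waiter."""
        waiter = threading.Event()
        with self._lock:             # analogous to the "<uuid>-events" lock in the log
            self._events.setdefault(instance_uuid, {})[event_name] = waiter
        return waiter

    def pop_instance_event(self, instance_uuid, event_name):
        """Return and remove the waiter for an event, or None if nobody is waiting."""
        with self._lock:
            return self._events.get(instance_uuid, {}).pop(event_name, None)


def handle_external_event(events, instance_uuid, event_name):
    """Dispatch an incoming event to its waiter, or warn if none was registered."""
    waiter = events.pop_instance_event(instance_uuid, event_name)
    if waiter is None:
        print("WARNING: received unexpected event %s for instance %s"
              % (event_name, instance_uuid))
    else:
        waiter.set()                 # unblocks whoever called prepare_for_event()


if __name__ == "__main__":
    ev = InstanceEvents()
    # No waiter registered: mirrors the WARNING lines in the log above.
    handle_external_event(ev, "869f8110-6490-4a47-955a-0ce085f826af",
                          "network-vif-plugged-9b0555db")
    # With a waiter registered, the event is dispatched instead of warned about.
    w = ev.prepare_for_event("abc81fa5", "network-vif-plugged-a41f3ab6")
    handle_external_event(ev, "abc81fa5", "network-vif-plugged-a41f3ab6")
    print("dispatched:", w.is_set())
```

The "unexpected event" warning is benign in this flow: the port was plugged before the spawn path began waiting for it, so there is simply no waiter to wake yet.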
[ 708.172273] env[63345]: DEBUG nova.compute.manager [req-b2247e81-6738-4a58-83a0-29e2685d58ba req-6f136c78-696e-438a-8575-b78ea72038e3 service nova] [instance: abc81fa5-78a9-48b1-a49e-2faffddf2411] Received event network-changed-a41f3ab6-3d63-4295-a7d4-ab01b18deef6 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 708.172273] env[63345]: DEBUG nova.compute.manager [req-b2247e81-6738-4a58-83a0-29e2685d58ba req-6f136c78-696e-438a-8575-b78ea72038e3 service nova] [instance: abc81fa5-78a9-48b1-a49e-2faffddf2411] Refreshing instance network info cache due to event network-changed-a41f3ab6-3d63-4295-a7d4-ab01b18deef6. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 708.172273] env[63345]: DEBUG oslo_concurrency.lockutils [req-b2247e81-6738-4a58-83a0-29e2685d58ba req-6f136c78-696e-438a-8575-b78ea72038e3 service nova] Acquiring lock "refresh_cache-abc81fa5-78a9-48b1-a49e-2faffddf2411" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 708.172273] env[63345]: DEBUG oslo_concurrency.lockutils [req-b2247e81-6738-4a58-83a0-29e2685d58ba req-6f136c78-696e-438a-8575-b78ea72038e3 service nova] Acquired lock "refresh_cache-abc81fa5-78a9-48b1-a49e-2faffddf2411" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 708.172553] env[63345]: DEBUG nova.network.neutron [req-b2247e81-6738-4a58-83a0-29e2685d58ba req-6f136c78-696e-438a-8575-b78ea72038e3 service nova] [instance: abc81fa5-78a9-48b1-a49e-2faffddf2411] Refreshing network info cache for port a41f3ab6-3d63-4295-a7d4-ab01b18deef6 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 708.189187] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a2551010-1e6c-414f-b6b5-e43b698e5890 tempest-InstanceActionsV221TestJSON-611793262 tempest-InstanceActionsV221TestJSON-611793262-project-member] Lock "4d41f4a7-4fde-4d34-be7c-533c00fe5ae6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.979s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 708.287108] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a9a6872-5f6e-4789-ac37-ff05f6e4994a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.299801] env[63345]: DEBUG oslo_vmware.api [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Task: {'id': task-1016834, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.051376} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.312725] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] File moved {{(pid=63345) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 708.312958] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] [instance: 30755716-03a7-41bd-90c2-7ef21baf9975] Cleaning up location [datastore1] vmware_temp/d8bb06a9-33b8-4db3-a450-f0780240dc42 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 708.313396] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Deleting the datastore file [datastore1] vmware_temp/d8bb06a9-33b8-4db3-a450-f0780240dc42 {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 708.314040] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b39a0d7f-3eb6-4262-995b-85516b6ff111 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 708.314787] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-50c9d3ba-087b-4a6f-a1e1-4745b8542b48 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.317768] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f54d327b-0b04-4494-bb9d-59ea11fe8fb2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.341418] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Updating instance '27c6dc17-4ded-4fe7-8fba-265eae64fc32' progress to 67 {{(pid=63345) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 708.345958] env[63345]: DEBUG oslo_vmware.api [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Waiting for the task: (returnval){ [ 708.345958] env[63345]: value = "task-1016838" [ 708.345958] env[63345]: _type = "Task" [ 708.345958] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.355122] env[63345]: DEBUG oslo_vmware.api [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Task: {'id': task-1016838, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.414395] env[63345]: DEBUG oslo_concurrency.lockutils [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Acquiring lock "refresh_cache-abc81fa5-78a9-48b1-a49e-2faffddf2411" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 708.466714] env[63345]: DEBUG nova.objects.base [None req-4b034b9f-9474-42d8-872b-53dcbafc0dd2 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Object Instance<3e4e58bd-903b-4b3d-8be4-5678aab6c721> lazy-loaded attributes: flavor,info_cache {{(pid=63345) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 708.555090] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016837, 'name': CreateVM_Task} progress is 99%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.711098] env[63345]: DEBUG nova.network.neutron [req-b2247e81-6738-4a58-83a0-29e2685d58ba req-6f136c78-696e-438a-8575-b78ea72038e3 service nova] [instance: abc81fa5-78a9-48b1-a49e-2faffddf2411] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 708.774317] env[63345]: DEBUG nova.compute.manager [req-7e78f66d-feda-4f11-ae16-30100457ac0f req-43149c3f-221b-4b72-9ee8-b16ec8985a17 service nova] [instance: 869f8110-6490-4a47-955a-0ce085f826af] Received event network-changed-9b0555db-b627-44ae-8812-42415d554cde {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 708.774558] env[63345]: DEBUG nova.compute.manager [req-7e78f66d-feda-4f11-ae16-30100457ac0f req-43149c3f-221b-4b72-9ee8-b16ec8985a17 service nova] [instance: 869f8110-6490-4a47-955a-0ce085f826af] Refreshing instance network info cache due to event network-changed-9b0555db-b627-44ae-8812-42415d554cde. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 708.774716] env[63345]: DEBUG oslo_concurrency.lockutils [req-7e78f66d-feda-4f11-ae16-30100457ac0f req-43149c3f-221b-4b72-9ee8-b16ec8985a17 service nova] Acquiring lock "refresh_cache-869f8110-6490-4a47-955a-0ce085f826af" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 708.774859] env[63345]: DEBUG oslo_concurrency.lockutils [req-7e78f66d-feda-4f11-ae16-30100457ac0f req-43149c3f-221b-4b72-9ee8-b16ec8985a17 service nova] Acquired lock "refresh_cache-869f8110-6490-4a47-955a-0ce085f826af" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 708.779023] env[63345]: DEBUG nova.network.neutron [req-7e78f66d-feda-4f11-ae16-30100457ac0f req-43149c3f-221b-4b72-9ee8-b16ec8985a17 service nova] [instance: 869f8110-6490-4a47-955a-0ce085f826af] Refreshing network info cache for port 9b0555db-b627-44ae-8812-42415d554cde {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 708.795315] env[63345]: DEBUG nova.network.neutron [req-b2247e81-6738-4a58-83a0-29e2685d58ba req-6f136c78-696e-438a-8575-b78ea72038e3 service nova] [instance: abc81fa5-78a9-48b1-a49e-2faffddf2411] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 708.863655] env[63345]: DEBUG oslo_vmware.api [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Task: {'id': task-1016838, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.030438} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.864100] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 708.864910] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0e096869-2763-4ee3-99da-6defaf9cd4f3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.873124] env[63345]: DEBUG oslo_vmware.api [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Waiting for the task: (returnval){ [ 708.873124] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52827f27-cc71-1cab-f1a7-0b97fd9efb56" [ 708.873124] env[63345]: _type = "Task" [ 708.873124] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.883520] env[63345]: DEBUG oslo_vmware.api [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52827f27-cc71-1cab-f1a7-0b97fd9efb56, 'name': SearchDatastore_Task, 'duration_secs': 0.009442} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.884044] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 708.884422] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore1] 30755716-03a7-41bd-90c2-7ef21baf9975/30755716-03a7-41bd-90c2-7ef21baf9975.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 708.884903] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a95f13c4-5f6c-4a56-98c6-5d208e3ab94f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.896115] env[63345]: DEBUG nova.network.neutron [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Port 8c1bd582-6867-4cba-9522-0e03560fa3f7 binding to destination host cpu-1 is already ACTIVE {{(pid=63345) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3171}} [ 708.896486] env[63345]: DEBUG oslo_vmware.api [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Waiting for the task: (returnval){ [ 708.896486] env[63345]: value = "task-1016839" [ 708.896486] env[63345]: _type = "Task" [ 708.896486] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.907612] env[63345]: DEBUG oslo_vmware.api [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Task: {'id': task-1016839, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.060818] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016837, 'name': CreateVM_Task, 'duration_secs': 0.614918} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.061058] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 869f8110-6490-4a47-955a-0ce085f826af] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 709.061941] env[63345]: DEBUG oslo_concurrency.lockutils [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 709.061941] env[63345]: DEBUG oslo_concurrency.lockutils [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 709.062285] env[63345]: DEBUG oslo_concurrency.lockutils [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 709.063227] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c0740dda-89fa-4153-a660-cdec2943dd90 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.069433] env[63345]: DEBUG oslo_vmware.api [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 709.069433] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]5223cfe5-3849-2529-4303-2b0867bb393e" [ 709.069433] env[63345]: _type = "Task" [ 709.069433] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.087078] env[63345]: DEBUG oslo_vmware.api [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5223cfe5-3849-2529-4303-2b0867bb393e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.197473] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f54158d-b27d-40e9-b825-d2aa5b3582c7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.207302] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8602cc21-bc96-45ee-9884-2cd5e47f6456 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.243509] env[63345]: DEBUG nova.network.neutron [None req-4b034b9f-9474-42d8-872b-53dcbafc0dd2 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 3e4e58bd-903b-4b3d-8be4-5678aab6c721] Updating instance_info_cache with network_info: [{"id": "1e56115d-0d69-4b50-8607-b08677046c73", "address": "fa:16:3e:98:00:76", "network": {"id": "1888e7dd-bfd6-49d6-afb2-6ba1b22314cc", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-70760398-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a1f3a565957a4316af1b8fa14f81e75a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0df968ae-c1ef-4009-a0f4-6f2e799c2fda", "external-id": "nsx-vlan-transportzone-864", "segmentation_id": 864, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e56115d-0d", "ovs_interfaceid": "1e56115d-0d69-4b50-8607-b08677046c73", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 709.246247] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41489d38-4447-4a66-8aa6-4a904ce5bcef {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.256251] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d330021a-512e-4520-8fc4-15e02800aa72 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.273126] env[63345]: DEBUG nova.compute.provider_tree [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 709.298039] env[63345]: DEBUG oslo_concurrency.lockutils [req-b2247e81-6738-4a58-83a0-29e2685d58ba req-6f136c78-696e-438a-8575-b78ea72038e3 service nova] Releasing lock "refresh_cache-abc81fa5-78a9-48b1-a49e-2faffddf2411" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 709.298488] env[63345]: DEBUG oslo_concurrency.lockutils [None 
req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Acquired lock "refresh_cache-abc81fa5-78a9-48b1-a49e-2faffddf2411" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 709.298740] env[63345]: DEBUG nova.network.neutron [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] [instance: abc81fa5-78a9-48b1-a49e-2faffddf2411] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 709.417480] env[63345]: DEBUG oslo_vmware.api [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Task: {'id': task-1016839, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.483191} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.417744] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore1] 30755716-03a7-41bd-90c2-7ef21baf9975/30755716-03a7-41bd-90c2-7ef21baf9975.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 709.417970] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] [instance: 30755716-03a7-41bd-90c2-7ef21baf9975] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 709.418539] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-746f9c4c-2a7d-489a-9fb3-81d8963d4a1a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.427685] env[63345]: DEBUG oslo_vmware.api [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Waiting for the task: (returnval){ [ 709.427685] env[63345]: value = "task-1016840" [ 709.427685] env[63345]: _type = "Task" [ 709.427685] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.438903] env[63345]: DEBUG oslo_vmware.api [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Task: {'id': task-1016840, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.581648] env[63345]: DEBUG oslo_vmware.api [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5223cfe5-3849-2529-4303-2b0867bb393e, 'name': SearchDatastore_Task, 'duration_secs': 0.061261} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.581648] env[63345]: DEBUG oslo_concurrency.lockutils [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 709.581648] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 869f8110-6490-4a47-955a-0ce085f826af] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 709.581648] env[63345]: DEBUG oslo_concurrency.lockutils [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 709.581828] env[63345]: DEBUG oslo_concurrency.lockutils [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 709.581877] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 709.582155] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1f8a70aa-4847-4bbc-b4ca-f0fad040b68c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.591223] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 709.591460] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 709.592232] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0b704fc7-96d4-402c-9a2d-62296fa1d85c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.599231] env[63345]: DEBUG oslo_vmware.api [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 709.599231] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]520298de-28e5-7b3f-3ad4-47f0a34310a0" [ 709.599231] env[63345]: _type = "Task" [ 709.599231] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.607237] env[63345]: DEBUG oslo_vmware.api [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]520298de-28e5-7b3f-3ad4-47f0a34310a0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.639060] env[63345]: DEBUG nova.network.neutron [req-7e78f66d-feda-4f11-ae16-30100457ac0f req-43149c3f-221b-4b72-9ee8-b16ec8985a17 service nova] [instance: 869f8110-6490-4a47-955a-0ce085f826af] Updated VIF entry in instance network info cache for port 9b0555db-b627-44ae-8812-42415d554cde. {{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 709.639641] env[63345]: DEBUG nova.network.neutron [req-7e78f66d-feda-4f11-ae16-30100457ac0f req-43149c3f-221b-4b72-9ee8-b16ec8985a17 service nova] [instance: 869f8110-6490-4a47-955a-0ce085f826af] Updating instance_info_cache with network_info: [{"id": "9b0555db-b627-44ae-8812-42415d554cde", "address": "fa:16:3e:4c:4f:70", "network": {"id": "18b67684-3f06-4f15-be40-ba0b2769b248", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1680877425-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cb91ecf5d00e48dea9baf2122ac4fed7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "68add7d6-c025-46fa-84d3-9c589adb63e4", "external-id": "nsx-vlan-transportzone-961", "segmentation_id": 961, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9b0555db-b6", "ovs_interfaceid": "9b0555db-b627-44ae-8812-42415d554cde", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 709.752052] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4b034b9f-9474-42d8-872b-53dcbafc0dd2 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Releasing lock "refresh_cache-3e4e58bd-903b-4b3d-8be4-5678aab6c721" {{(pid=63345) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 709.778959] env[63345]: DEBUG nova.scheduler.client.report [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 709.832260] env[63345]: DEBUG nova.network.neutron [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] [instance: abc81fa5-78a9-48b1-a49e-2faffddf2411] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 709.926835] env[63345]: DEBUG oslo_concurrency.lockutils [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Acquiring lock "27c6dc17-4ded-4fe7-8fba-265eae64fc32-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 709.927081] env[63345]: DEBUG oslo_concurrency.lockutils [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Lock "27c6dc17-4ded-4fe7-8fba-265eae64fc32-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 709.927262] env[63345]: DEBUG oslo_concurrency.lockutils [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Lock "27c6dc17-4ded-4fe7-8fba-265eae64fc32-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 709.945725] env[63345]: DEBUG oslo_vmware.api [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Task: {'id': task-1016840, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068953} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.946893] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] [instance: 30755716-03a7-41bd-90c2-7ef21baf9975] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 709.947695] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-babfaf45-126a-4150-95da-52fd820d355c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.978864] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] [instance: 30755716-03a7-41bd-90c2-7ef21baf9975] Reconfiguring VM instance instance-00000029 to attach disk [datastore1] 30755716-03a7-41bd-90c2-7ef21baf9975/30755716-03a7-41bd-90c2-7ef21baf9975.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 709.979183] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9304c183-4b78-42b9-9376-21fd73681041 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.993737] env[63345]: DEBUG nova.network.neutron [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] [instance: abc81fa5-78a9-48b1-a49e-2faffddf2411] Updating instance_info_cache with network_info: [{"id": "a41f3ab6-3d63-4295-a7d4-ab01b18deef6", "address": "fa:16:3e:c3:b7:17", "network": {"id": "fff4d727-d394-4eb1-b3cc-da3d3a5fe196", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-1994083979-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b367b9e5b9f945b6aad6b9b5f9900e8c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "47ca1ce6-8148-48d5-bcfe-89e39b73914e", "external-id": "nsx-vlan-transportzone-259", "segmentation_id": 259, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa41f3ab6-3d", "ovs_interfaceid": "a41f3ab6-3d63-4295-a7d4-ab01b18deef6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 710.004600] env[63345]: DEBUG oslo_vmware.api [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Waiting for the task: (returnval){ [ 710.004600] env[63345]: value = "task-1016841" [ 710.004600] env[63345]: _type = "Task" [ 710.004600] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.014318] env[63345]: DEBUG oslo_vmware.api [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Task: {'id': task-1016841, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.109609] env[63345]: DEBUG oslo_vmware.api [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]520298de-28e5-7b3f-3ad4-47f0a34310a0, 'name': SearchDatastore_Task, 'duration_secs': 0.009852} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.110459] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-14979a08-46dc-4b9d-9cb9-5d78d15c6fc3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.115905] env[63345]: DEBUG oslo_vmware.api [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 710.115905] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52dc30c2-1d12-3011-f738-5f12c9ef26a9" [ 710.115905] env[63345]: _type = "Task" [ 710.115905] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.123193] env[63345]: DEBUG oslo_vmware.api [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52dc30c2-1d12-3011-f738-5f12c9ef26a9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.142916] env[63345]: DEBUG oslo_concurrency.lockutils [req-7e78f66d-feda-4f11-ae16-30100457ac0f req-43149c3f-221b-4b72-9ee8-b16ec8985a17 service nova] Releasing lock "refresh_cache-869f8110-6490-4a47-955a-0ce085f826af" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 710.143197] env[63345]: DEBUG nova.compute.manager [req-7e78f66d-feda-4f11-ae16-30100457ac0f req-43149c3f-221b-4b72-9ee8-b16ec8985a17 service nova] [instance: c07c7f5d-a674-458f-8253-1bc2d61be6c1] Received event network-vif-deleted-6950f2a7-3573-46f0-9c46-3301f7ebcf5f {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 710.288198] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.631s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 710.288198] env[63345]: DEBUG nova.compute.manager [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c] Start building networks asynchronously for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 710.290021] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.294s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 710.291726] env[63345]: INFO nova.compute.claims [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 34e0234c-36c4-4878-979b-46f045bd1785] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 710.499219] env[63345]: DEBUG oslo_concurrency.lockutils [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Releasing lock "refresh_cache-abc81fa5-78a9-48b1-a49e-2faffddf2411" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 710.501071] env[63345]: DEBUG nova.compute.manager [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] [instance: abc81fa5-78a9-48b1-a49e-2faffddf2411] Instance network_info: |[{"id": "a41f3ab6-3d63-4295-a7d4-ab01b18deef6", "address": "fa:16:3e:c3:b7:17", "network": {"id": "fff4d727-d394-4eb1-b3cc-da3d3a5fe196", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-1994083979-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": 
{"injected": false, "tenant_id": "b367b9e5b9f945b6aad6b9b5f9900e8c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "47ca1ce6-8148-48d5-bcfe-89e39b73914e", "external-id": "nsx-vlan-transportzone-259", "segmentation_id": 259, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa41f3ab6-3d", "ovs_interfaceid": "a41f3ab6-3d63-4295-a7d4-ab01b18deef6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 710.501499] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] [instance: abc81fa5-78a9-48b1-a49e-2faffddf2411] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c3:b7:17', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '47ca1ce6-8148-48d5-bcfe-89e39b73914e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a41f3ab6-3d63-4295-a7d4-ab01b18deef6', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 710.509558] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Creating folder: Project (b367b9e5b9f945b6aad6b9b5f9900e8c). Parent ref: group-v225918. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 710.510472] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-832ad902-30e5-4950-94bf-df62a2256b6b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.521901] env[63345]: DEBUG oslo_vmware.api [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Task: {'id': task-1016841, 'name': ReconfigVM_Task, 'duration_secs': 0.290369} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.522166] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] [instance: 30755716-03a7-41bd-90c2-7ef21baf9975] Reconfigured VM instance instance-00000029 to attach disk [datastore1] 30755716-03a7-41bd-90c2-7ef21baf9975/30755716-03a7-41bd-90c2-7ef21baf9975.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 710.522750] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fb1228c5-686e-48b5-a587-a6d351a65912 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.525757] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Created folder: Project (b367b9e5b9f945b6aad6b9b5f9900e8c) in parent group-v225918. 
[ 710.525757] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Creating folder: Instances. Parent ref: group-v225983. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 710.525926] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a5721b20-df92-40b6-b718-649f02440ee9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.528782] env[63345]: DEBUG oslo_vmware.api [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Waiting for the task: (returnval){ [ 710.528782] env[63345]: value = "task-1016843" [ 710.528782] env[63345]: _type = "Task" [ 710.528782] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.536089] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Created folder: Instances in parent group-v225983. [ 710.536308] env[63345]: DEBUG oslo.service.loopingcall [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 710.536771] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: abc81fa5-78a9-48b1-a49e-2faffddf2411] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 710.536964] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-04ccd676-ca5e-4a3e-a0fa-b6538a99ed9d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.553878] env[63345]: DEBUG oslo_vmware.api [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Task: {'id': task-1016843, 'name': Rename_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.558128] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 710.558128] env[63345]: value = "task-1016845" [ 710.558128] env[63345]: _type = "Task" [ 710.558128] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.565864] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016845, 'name': CreateVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.631408] env[63345]: DEBUG oslo_vmware.api [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52dc30c2-1d12-3011-f738-5f12c9ef26a9, 'name': SearchDatastore_Task, 'duration_secs': 0.014113} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.631741] env[63345]: DEBUG oslo_concurrency.lockutils [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 710.632222] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore1] 869f8110-6490-4a47-955a-0ce085f826af/869f8110-6490-4a47-955a-0ce085f826af.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 710.632442] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d5a8f6db-76ef-4190-8694-c88402e196c8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.639338] env[63345]: DEBUG oslo_vmware.api [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 710.639338] env[63345]: value = "task-1016846" [ 710.639338] env[63345]: _type = "Task" [ 710.639338] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.648022] env[63345]: DEBUG oslo_vmware.api [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1016846, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.758570] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b034b9f-9474-42d8-872b-53dcbafc0dd2 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 3e4e58bd-903b-4b3d-8be4-5678aab6c721] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 710.759112] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1c9ac8d8-ab62-4342-9070-c473024ac0b8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.766640] env[63345]: DEBUG oslo_vmware.api [None req-4b034b9f-9474-42d8-872b-53dcbafc0dd2 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Waiting for the task: (returnval){ [ 710.766640] env[63345]: value = "task-1016847" [ 710.766640] env[63345]: _type = "Task" [ 710.766640] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.775572] env[63345]: DEBUG oslo_vmware.api [None req-4b034b9f-9474-42d8-872b-53dcbafc0dd2 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Task: {'id': task-1016847, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.796234] env[63345]: DEBUG nova.compute.utils [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 710.800217] env[63345]: DEBUG nova.compute.manager [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c] Allocating IP information in the background. {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 710.800217] env[63345]: DEBUG nova.network.neutron [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 710.949040] env[63345]: DEBUG nova.policy [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e463e6e279284d979d49753ff4d07572', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ea42057a148e453d869b4af82bdb21bb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 711.039582] env[63345]: DEBUG oslo_vmware.api [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Task: {'id': task-1016843, 'name': Rename_Task, 'duration_secs': 0.148115} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.040304] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] [instance: 30755716-03a7-41bd-90c2-7ef21baf9975] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 711.040791] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-69d7f004-4319-4f8b-a270-e22455dbf155 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.047907] env[63345]: DEBUG oslo_vmware.api [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Waiting for the task: (returnval){ [ 711.047907] env[63345]: value = "task-1016848" [ 711.047907] env[63345]: _type = "Task" [ 711.047907] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.053204] env[63345]: DEBUG oslo_concurrency.lockutils [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Acquiring lock "refresh_cache-27c6dc17-4ded-4fe7-8fba-265eae64fc32" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 711.053561] env[63345]: DEBUG oslo_concurrency.lockutils [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Acquired lock "refresh_cache-27c6dc17-4ded-4fe7-8fba-265eae64fc32" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 711.053876] env[63345]: DEBUG nova.network.neutron [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 711.064127] env[63345]: DEBUG oslo_vmware.api [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Task: {'id': task-1016848, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.077720] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016845, 'name': CreateVM_Task, 'duration_secs': 0.33791} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.078340] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: abc81fa5-78a9-48b1-a49e-2faffddf2411] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 711.080679] env[63345]: DEBUG oslo_concurrency.lockutils [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 711.082068] env[63345]: DEBUG oslo_concurrency.lockutils [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 711.082068] env[63345]: DEBUG oslo_concurrency.lockutils [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 711.082624] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f9a1bc17-8950-474d-a1fc-168e76361fbe {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
711.090018] env[63345]: DEBUG oslo_vmware.api [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Waiting for the task: (returnval){ [ 711.090018] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52d27c49-9324-de39-07f8-f055eadb2afb" [ 711.090018] env[63345]: _type = "Task" [ 711.090018] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.099745] env[63345]: DEBUG oslo_vmware.api [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52d27c49-9324-de39-07f8-f055eadb2afb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.152029] env[63345]: DEBUG oslo_vmware.api [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1016846, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.279726] env[63345]: DEBUG oslo_vmware.api [None req-4b034b9f-9474-42d8-872b-53dcbafc0dd2 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Task: {'id': task-1016847, 'name': PowerOnVM_Task, 'duration_secs': 0.435604} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.280167] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b034b9f-9474-42d8-872b-53dcbafc0dd2 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 3e4e58bd-903b-4b3d-8be4-5678aab6c721] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 711.280476] env[63345]: DEBUG nova.compute.manager [None req-4b034b9f-9474-42d8-872b-53dcbafc0dd2 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 3e4e58bd-903b-4b3d-8be4-5678aab6c721] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 711.281759] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b77ab1cc-de7c-4931-8d2c-cac3b265f84a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.303659] env[63345]: DEBUG nova.compute.manager [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c] Start building block device mappings for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 711.389990] env[63345]: DEBUG nova.network.neutron [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c] Successfully created port: 613b8631-b2ef-4da4-8e79-67f2fda08ab5 {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 711.563441] env[63345]: DEBUG oslo_vmware.api [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Task: {'id': task-1016848, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.601831] env[63345]: DEBUG oslo_vmware.api [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52d27c49-9324-de39-07f8-f055eadb2afb, 'name': SearchDatastore_Task, 'duration_secs': 0.059855} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.605300] env[63345]: DEBUG oslo_concurrency.lockutils [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 711.606228] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] [instance: abc81fa5-78a9-48b1-a49e-2faffddf2411] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 711.606228] env[63345]: DEBUG oslo_concurrency.lockutils [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 711.606228] env[63345]: DEBUG oslo_concurrency.lockutils [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 711.606228] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 711.608019] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0ee146ef-07cd-42ea-a921-4ccb05779d90 {{(pid=63345) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.615541] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 711.615817] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 711.617342] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6438b305-b2ce-4936-b5e0-f5c84979bb8a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.625530] env[63345]: DEBUG oslo_vmware.api [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Waiting for the task: (returnval){ [ 711.625530] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52bac3ad-19ea-7228-cf4a-f90c4c954a7c" [ 711.625530] env[63345]: _type = "Task" [ 711.625530] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.634558] env[63345]: DEBUG oslo_vmware.api [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52bac3ad-19ea-7228-cf4a-f90c4c954a7c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.652758] env[63345]: DEBUG oslo_vmware.api [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1016846, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.607811} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.653038] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore1] 869f8110-6490-4a47-955a-0ce085f826af/869f8110-6490-4a47-955a-0ce085f826af.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 711.653341] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 869f8110-6490-4a47-955a-0ce085f826af] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 711.653555] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-83f92ce0-f992-4a50-ab48-844ddcac934c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.660582] env[63345]: DEBUG oslo_vmware.api [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 711.660582] env[63345]: value = "task-1016849" [ 711.660582] env[63345]: _type = "Task" [ 711.660582] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.669733] env[63345]: DEBUG oslo_vmware.api [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1016849, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.857154] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-823954a2-81a2-41fe-88aa-f3656cc385db {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.864574] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-834d7826-32c5-4838-93a4-503fecb8d44d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.902855] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3066c49e-64a6-411f-8470-bb9227e48c6a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.910973] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2cd0461-cfe8-4428-ae09-ddd43d1df475 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.925413] env[63345]: DEBUG nova.compute.provider_tree [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 712.058581] env[63345]: DEBUG oslo_vmware.api [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Task: {'id': task-1016848, 'name': PowerOnVM_Task, 'duration_secs': 0.515947} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.060849] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] [instance: 30755716-03a7-41bd-90c2-7ef21baf9975] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 712.061073] env[63345]: INFO nova.compute.manager [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] [instance: 30755716-03a7-41bd-90c2-7ef21baf9975] Took 12.65 seconds to spawn the instance on the hypervisor. 
[ 712.061255] env[63345]: DEBUG nova.compute.manager [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] [instance: 30755716-03a7-41bd-90c2-7ef21baf9975] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 712.062033] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f65ad67-007e-42d2-9e8e-97c4a644a0cb {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.135861] env[63345]: DEBUG oslo_vmware.api [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52bac3ad-19ea-7228-cf4a-f90c4c954a7c, 'name': SearchDatastore_Task, 'duration_secs': 0.012186} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.136716] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-534fddeb-a9ff-4b1c-8f22-630864fa5f52 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.142011] env[63345]: DEBUG oslo_vmware.api [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Waiting for the task: (returnval){ [ 712.142011] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52d0f23e-85e4-3c1f-1ae8-be8812cac723" [ 712.142011] env[63345]: _type = "Task" [ 712.142011] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.151596] env[63345]: DEBUG oslo_vmware.api [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52d0f23e-85e4-3c1f-1ae8-be8812cac723, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.168864] env[63345]: DEBUG oslo_vmware.api [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1016849, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.105901} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.168972] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 869f8110-6490-4a47-955a-0ce085f826af] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 712.169807] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69a21e32-ecf0-4340-baf5-6e0389da515c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.174264] env[63345]: DEBUG nova.network.neutron [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Updating instance_info_cache with network_info: [{"id": "8c1bd582-6867-4cba-9522-0e03560fa3f7", "address": "fa:16:3e:3d:4f:aa", "network": {"id": "18285fd9-d154-415c-acbb-1494303e3b6c", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "5dc99cc64e6c4d83928b309253a8df8d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8b99a46-3e7f-4ef1-9e45-58e6cd17f210", "external-id": "nsx-vlan-transportzone-704", "segmentation_id": 704, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c1bd582-68", "ovs_interfaceid": "8c1bd582-6867-4cba-9522-0e03560fa3f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 712.199331] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 869f8110-6490-4a47-955a-0ce085f826af] Reconfiguring VM instance instance-0000002a to attach disk [datastore1] 869f8110-6490-4a47-955a-0ce085f826af/869f8110-6490-4a47-955a-0ce085f826af.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 712.200307] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c45bf67b-548e-405c-b86a-73f813f4f70c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.223285] env[63345]: DEBUG oslo_vmware.api [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 712.223285] env[63345]: value = "task-1016850" [ 712.223285] env[63345]: _type = "Task" [ 712.223285] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.234736] env[63345]: DEBUG oslo_vmware.api [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1016850, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.315168] env[63345]: DEBUG nova.compute.manager [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c] Start spawning the instance on the hypervisor. {{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 712.357172] env[63345]: DEBUG nova.virt.hardware [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 712.357436] env[63345]: DEBUG nova.virt.hardware [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 712.357598] env[63345]: DEBUG nova.virt.hardware [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 712.357779] env[63345]: DEBUG nova.virt.hardware [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 712.358078] env[63345]: DEBUG nova.virt.hardware [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 712.358827] env[63345]: DEBUG nova.virt.hardware [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 712.359283] env[63345]: DEBUG 
nova.virt.hardware [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 712.359484] env[63345]: DEBUG nova.virt.hardware [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 712.359696] env[63345]: DEBUG nova.virt.hardware [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 712.359869] env[63345]: DEBUG nova.virt.hardware [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 712.360058] env[63345]: DEBUG nova.virt.hardware [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 712.362779] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23462a88-f7dd-4053-b025-6d9e2b7c71cc {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.372039] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-221588e2-1473-4b3c-8159-a94be8deb5cb {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.430118] env[63345]: DEBUG nova.scheduler.client.report [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 712.582488] env[63345]: INFO nova.compute.manager [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] [instance: 30755716-03a7-41bd-90c2-7ef21baf9975] Took 44.76 seconds to build instance. 
[ 712.653671] env[63345]: DEBUG oslo_vmware.api [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52d0f23e-85e4-3c1f-1ae8-be8812cac723, 'name': SearchDatastore_Task, 'duration_secs': 0.010105} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.653773] env[63345]: DEBUG oslo_concurrency.lockutils [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 712.654026] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore1] abc81fa5-78a9-48b1-a49e-2faffddf2411/abc81fa5-78a9-48b1-a49e-2faffddf2411.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 712.654293] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-521a955f-a310-4433-8b5f-19136fa75dba {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.661706] env[63345]: DEBUG oslo_vmware.api [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Waiting for the task: (returnval){ [ 712.661706] env[63345]: value = "task-1016851" [ 712.661706] env[63345]: _type = "Task" [ 712.661706] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.670570] env[63345]: DEBUG oslo_vmware.api [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Task: {'id': task-1016851, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.677780] env[63345]: DEBUG oslo_concurrency.lockutils [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Releasing lock "refresh_cache-27c6dc17-4ded-4fe7-8fba-265eae64fc32" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 712.735765] env[63345]: DEBUG oslo_vmware.api [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1016850, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.935468] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.645s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 712.938099] env[63345]: DEBUG nova.compute.manager [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 34e0234c-36c4-4878-979b-46f045bd1785] Start building networks asynchronously for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 712.940037] env[63345]: DEBUG oslo_concurrency.lockutils [None req-06a9855c-2cb2-427e-89b8-67a8824663d4 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.583s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 712.940920] env[63345]: DEBUG nova.objects.instance [None req-06a9855c-2cb2-427e-89b8-67a8824663d4 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Lazy-loading 'resources' on Instance uuid b4a7d6dd-98dc-49d8-b344-1878cd5a3f51 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 712.960762] env[63345]: DEBUG nova.network.neutron [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c] Successfully updated port: 613b8631-b2ef-4da4-8e79-67f2fda08ab5 {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 713.038165] env[63345]: DEBUG nova.compute.manager [req-6af004c3-5a6d-4971-8efb-cd8f3565022a req-7220773b-8f66-4877-b119-03cb63cd866a service nova] [instance: 78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c] Received event network-vif-plugged-613b8631-b2ef-4da4-8e79-67f2fda08ab5 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 713.038475] env[63345]: DEBUG oslo_concurrency.lockutils [req-6af004c3-5a6d-4971-8efb-cd8f3565022a req-7220773b-8f66-4877-b119-03cb63cd866a service nova] Acquiring lock "78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 713.038603] env[63345]: DEBUG oslo_concurrency.lockutils [req-6af004c3-5a6d-4971-8efb-cd8f3565022a req-7220773b-8f66-4877-b119-03cb63cd866a service nova] Lock "78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 713.038752] env[63345]: DEBUG oslo_concurrency.lockutils [req-6af004c3-5a6d-4971-8efb-cd8f3565022a req-7220773b-8f66-4877-b119-03cb63cd866a service nova] Lock "78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 
0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 713.038922] env[63345]: DEBUG nova.compute.manager [req-6af004c3-5a6d-4971-8efb-cd8f3565022a req-7220773b-8f66-4877-b119-03cb63cd866a service nova] [instance: 78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c] No waiting events found dispatching network-vif-plugged-613b8631-b2ef-4da4-8e79-67f2fda08ab5 {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 713.040163] env[63345]: WARNING nova.compute.manager [req-6af004c3-5a6d-4971-8efb-cd8f3565022a req-7220773b-8f66-4877-b119-03cb63cd866a service nova] [instance: 78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c] Received unexpected event network-vif-plugged-613b8631-b2ef-4da4-8e79-67f2fda08ab5 for instance with vm_state building and task_state spawning. [ 713.084616] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d9439545-bc67-496e-bd91-2cd82200f624 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Lock "30755716-03a7-41bd-90c2-7ef21baf9975" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 128.516s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 713.175289] env[63345]: DEBUG oslo_vmware.api [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Task: {'id': task-1016851, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.207729] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-177045c9-50cb-4fdb-a4f8-80babb731a5c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.230618] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63e9d3f7-5c0a-4cbc-a81c-428b0bcf8f19 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.241144] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Updating instance '27c6dc17-4ded-4fe7-8fba-265eae64fc32' progress to 83 {{(pid=63345) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 713.247854] env[63345]: DEBUG oslo_vmware.api [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1016850, 'name': ReconfigVM_Task, 'duration_secs': 0.608274} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.249019] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 869f8110-6490-4a47-955a-0ce085f826af] Reconfigured VM instance instance-0000002a to attach disk [datastore1] 869f8110-6490-4a47-955a-0ce085f826af/869f8110-6490-4a47-955a-0ce085f826af.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 713.249180] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8f013d9c-d94d-4a87-aea3-b9e558ce3824 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.257824] env[63345]: DEBUG oslo_vmware.api [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 713.257824] env[63345]: value = "task-1016852" [ 713.257824] env[63345]: _type = "Task" [ 713.257824] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.268538] env[63345]: DEBUG oslo_vmware.api [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1016852, 'name': Rename_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.444338] env[63345]: DEBUG nova.compute.utils [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 713.448293] env[63345]: DEBUG nova.compute.manager [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 34e0234c-36c4-4878-979b-46f045bd1785] Allocating IP information in the background. 
{{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 713.452184] env[63345]: DEBUG nova.network.neutron [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 34e0234c-36c4-4878-979b-46f045bd1785] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 713.466641] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Acquiring lock "refresh_cache-78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 713.466733] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Acquired lock "refresh_cache-78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 713.466868] env[63345]: DEBUG nova.network.neutron [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 713.512536] env[63345]: DEBUG nova.policy [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e463e6e279284d979d49753ff4d07572', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ea42057a148e453d869b4af82bdb21bb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 713.589345] env[63345]: DEBUG nova.compute.manager [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] [instance: d3e99100-f13f-4019-9b5a-adaa65dacc5f] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 713.677897] env[63345]: DEBUG oslo_vmware.api [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Task: {'id': task-1016851, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.704082} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.677897] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore1] abc81fa5-78a9-48b1-a49e-2faffddf2411/abc81fa5-78a9-48b1-a49e-2faffddf2411.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 713.678226] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] [instance: abc81fa5-78a9-48b1-a49e-2faffddf2411] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 713.678496] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f4010520-f786-4890-8709-ab5127218cf6 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.685759] env[63345]: DEBUG oslo_vmware.api [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Waiting for the task: (returnval){ [ 713.685759] env[63345]: value = "task-1016853" [ 713.685759] env[63345]: _type = "Task" [ 713.685759] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.701019] env[63345]: DEBUG oslo_vmware.api [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Task: {'id': task-1016853, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.753823] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 713.754986] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-96036c1c-8d41-4409-81dc-6c5294e29a5a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.771962] env[63345]: DEBUG oslo_vmware.api [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Waiting for the task: (returnval){ [ 713.771962] env[63345]: value = "task-1016854" [ 713.771962] env[63345]: _type = "Task" [ 713.771962] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.781783] env[63345]: DEBUG oslo_vmware.api [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1016852, 'name': Rename_Task, 'duration_secs': 0.270197} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.781955] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 869f8110-6490-4a47-955a-0ce085f826af] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 713.782361] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-36252d61-7300-4c80-b709-be8969d2bf46 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.791081] env[63345]: DEBUG oslo_vmware.api [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Task: {'id': task-1016854, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.796081] env[63345]: DEBUG oslo_vmware.api [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 713.796081] env[63345]: value = "task-1016855" [ 713.796081] env[63345]: _type = "Task" [ 713.796081] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.806229] env[63345]: DEBUG oslo_vmware.api [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1016855, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.957018] env[63345]: DEBUG nova.compute.manager [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 34e0234c-36c4-4878-979b-46f045bd1785] Start building block device mappings for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 713.965027] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10d0ca58-725e-4470-a7c9-a684f1617f8a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.979877] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e862b6b8-e91f-4d8d-b4ef-97113f386ba9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.984647] env[63345]: DEBUG nova.network.neutron [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 34e0234c-36c4-4878-979b-46f045bd1785] Successfully created port: 11bc09f4-1dfe-4b4b-8647-e126d27ae4b4 {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 714.020230] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca2ccd28-b1a5-4d6c-a1f5-b16033333a99 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.028299] env[63345]: DEBUG nova.network.neutron [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 714.035090] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a9bd521-acf8-4204-a5cf-65576bcc90f4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.052170] env[63345]: DEBUG nova.compute.provider_tree [None req-06a9855c-2cb2-427e-89b8-67a8824663d4 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 714.111346] env[63345]: DEBUG oslo_concurrency.lockutils [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 714.199541] env[63345]: DEBUG oslo_vmware.api [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Task: {'id': task-1016853, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075887} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.199957] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] [instance: abc81fa5-78a9-48b1-a49e-2faffddf2411] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 714.201336] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24c74130-fa12-41ee-908d-2e07a7d2a00e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.227858] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] [instance: abc81fa5-78a9-48b1-a49e-2faffddf2411] Reconfiguring VM instance instance-0000002b to attach disk [datastore1] abc81fa5-78a9-48b1-a49e-2faffddf2411/abc81fa5-78a9-48b1-a49e-2faffddf2411.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 714.228232] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c17a9e90-6c19-4223-93a6-7b28be6bdb60 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.253705] env[63345]: DEBUG oslo_vmware.api [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Waiting for the task: (returnval){ [ 714.253705] env[63345]: value = "task-1016856" [ 714.253705] env[63345]: _type = "Task" [ 714.253705] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 714.262067] env[63345]: DEBUG oslo_vmware.api [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Task: {'id': task-1016856, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.283208] env[63345]: DEBUG oslo_vmware.api [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Task: {'id': task-1016854, 'name': PowerOnVM_Task, 'duration_secs': 0.47758} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.283610] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 714.283696] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-284f673a-b59f-4dd5-ad21-e38211d37f8e tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Updating instance '27c6dc17-4ded-4fe7-8fba-265eae64fc32' progress to 100 {{(pid=63345) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 714.301298] env[63345]: DEBUG nova.network.neutron [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c] Updating instance_info_cache with network_info: [{"id": "613b8631-b2ef-4da4-8e79-67f2fda08ab5", "address": "fa:16:3e:1e:bf:f1", "network": {"id": "51c855a2-1d6a-480d-8107-11fc171d6b4b", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-48336875-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea42057a148e453d869b4af82bdb21bb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa09e855-8af1-419b-b78d-8ffcc94b1bfb", "external-id": "nsx-vlan-transportzone-901", "segmentation_id": 901, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap613b8631-b2", "ovs_interfaceid": "613b8631-b2ef-4da4-8e79-67f2fda08ab5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 714.307640] env[63345]: DEBUG oslo_vmware.api [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1016855, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.556962] env[63345]: DEBUG nova.scheduler.client.report [None req-06a9855c-2cb2-427e-89b8-67a8824663d4 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 714.764255] env[63345]: DEBUG oslo_vmware.api [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Task: {'id': task-1016856, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.809628] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Releasing lock "refresh_cache-78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 714.809929] env[63345]: DEBUG nova.compute.manager [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c] Instance network_info: |[{"id": "613b8631-b2ef-4da4-8e79-67f2fda08ab5", "address": "fa:16:3e:1e:bf:f1", "network": {"id": "51c855a2-1d6a-480d-8107-11fc171d6b4b", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-48336875-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea42057a148e453d869b4af82bdb21bb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa09e855-8af1-419b-b78d-8ffcc94b1bfb", "external-id": "nsx-vlan-transportzone-901", "segmentation_id": 901, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap613b8631-b2", "ovs_interfaceid": "613b8631-b2ef-4da4-8e79-67f2fda08ab5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 714.810255] env[63345]: DEBUG oslo_vmware.api [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1016855, 'name': PowerOnVM_Task, 'duration_secs': 0.64358} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.810588] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1e:bf:f1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aa09e855-8af1-419b-b78d-8ffcc94b1bfb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '613b8631-b2ef-4da4-8e79-67f2fda08ab5', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 714.818547] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Creating folder: Project (ea42057a148e453d869b4af82bdb21bb). Parent ref: group-v225918. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 714.818698] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 869f8110-6490-4a47-955a-0ce085f826af] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 714.818923] env[63345]: INFO nova.compute.manager [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 869f8110-6490-4a47-955a-0ce085f826af] Took 10.02 seconds to spawn the instance on the hypervisor. [ 714.819117] env[63345]: DEBUG nova.compute.manager [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 869f8110-6490-4a47-955a-0ce085f826af] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 714.820103] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7290d444-19ef-45d8-930e-95943fc39b90 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.822427] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8fa1200-d69f-479c-8f06-fa7749e00a34 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.835375] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Created folder: Project (ea42057a148e453d869b4af82bdb21bb) in parent group-v225918. [ 714.835534] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Creating folder: Instances. Parent ref: group-v225986. 
{{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 714.835760] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5994e5bb-df0d-4ed7-836e-be7ae2d159b4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.848031] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Created folder: Instances in parent group-v225986. [ 714.848168] env[63345]: DEBUG oslo.service.loopingcall [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 714.848665] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 714.848665] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-df87aab0-0dd5-4e15-8d5f-2f1e663f78bb {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.865137] env[63345]: DEBUG oslo_concurrency.lockutils [None req-2672da66-43f2-4cdb-9d1e-327963356641 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Acquiring lock "30755716-03a7-41bd-90c2-7ef21baf9975" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 714.869387] env[63345]: DEBUG oslo_concurrency.lockutils [None req-2672da66-43f2-4cdb-9d1e-327963356641 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Lock "30755716-03a7-41bd-90c2-7ef21baf9975" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 714.869387] env[63345]: DEBUG oslo_concurrency.lockutils [None req-2672da66-43f2-4cdb-9d1e-327963356641 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Acquiring lock "30755716-03a7-41bd-90c2-7ef21baf9975-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 714.869387] env[63345]: DEBUG oslo_concurrency.lockutils [None req-2672da66-43f2-4cdb-9d1e-327963356641 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Lock "30755716-03a7-41bd-90c2-7ef21baf9975-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 714.869387] env[63345]: DEBUG oslo_concurrency.lockutils [None req-2672da66-43f2-4cdb-9d1e-327963356641 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Lock "30755716-03a7-41bd-90c2-7ef21baf9975-events" 
"released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 714.869529] env[63345]: INFO nova.compute.manager [None req-2672da66-43f2-4cdb-9d1e-327963356641 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] [instance: 30755716-03a7-41bd-90c2-7ef21baf9975] Terminating instance [ 714.874802] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 714.874802] env[63345]: value = "task-1016859" [ 714.874802] env[63345]: _type = "Task" [ 714.874802] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 714.882744] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016859, 'name': CreateVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.973712] env[63345]: DEBUG nova.compute.manager [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 34e0234c-36c4-4878-979b-46f045bd1785] Start spawning the instance on the hypervisor. {{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 714.999660] env[63345]: DEBUG nova.virt.hardware [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 714.999920] env[63345]: DEBUG nova.virt.hardware [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 715.000114] env[63345]: DEBUG nova.virt.hardware [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 715.000315] env[63345]: DEBUG nova.virt.hardware [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 715.000465] env[63345]: DEBUG nova.virt.hardware [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 
tempest-MultipleCreateTestJSON-712956919-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 715.000629] env[63345]: DEBUG nova.virt.hardware [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 715.000845] env[63345]: DEBUG nova.virt.hardware [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 715.001011] env[63345]: DEBUG nova.virt.hardware [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 715.001187] env[63345]: DEBUG nova.virt.hardware [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 715.001348] env[63345]: DEBUG nova.virt.hardware [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 715.001541] env[63345]: DEBUG nova.virt.hardware [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 715.002451] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d9d98aa-9ea9-4e25-b9a2-26482def4685 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.010439] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b72e873-78b4-4f20-b14f-4953695ebfc2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.069134] env[63345]: DEBUG oslo_concurrency.lockutils [None req-06a9855c-2cb2-427e-89b8-67a8824663d4 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.127s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 715.069853] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0fd28a91-2a8f-436d-8d59-05d4831a8f65 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.864s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 715.070538] env[63345]: DEBUG nova.objects.instance [None req-0fd28a91-2a8f-436d-8d59-05d4831a8f65 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Lazy-loading 'resources' on Instance uuid 28caa5f5-141a-4ef9-abb3-33a1973d99cf {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 715.096821] env[63345]: INFO nova.scheduler.client.report [None req-06a9855c-2cb2-427e-89b8-67a8824663d4 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Deleted allocations for instance b4a7d6dd-98dc-49d8-b344-1878cd5a3f51 [ 715.266298] env[63345]: DEBUG oslo_vmware.api [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Task: {'id': task-1016856, 'name': ReconfigVM_Task, 'duration_secs': 0.740453} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.266608] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] [instance: abc81fa5-78a9-48b1-a49e-2faffddf2411] Reconfigured VM instance instance-0000002b to attach disk [datastore1] abc81fa5-78a9-48b1-a49e-2faffddf2411/abc81fa5-78a9-48b1-a49e-2faffddf2411.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 715.267222] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-017bec77-b72a-4b55-af73-36b74e112bde {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.273896] env[63345]: DEBUG oslo_vmware.api [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Waiting for the task: (returnval){ [ 715.273896] env[63345]: value = "task-1016860" [ 715.273896] env[63345]: _type = "Task" [ 715.273896] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.284107] env[63345]: DEBUG oslo_vmware.api [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Task: {'id': task-1016860, 'name': Rename_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.342131] env[63345]: INFO nova.compute.manager [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 869f8110-6490-4a47-955a-0ce085f826af] Took 45.42 seconds to build instance. 
[ 715.357146] env[63345]: DEBUG nova.compute.manager [req-865abfd4-11b3-42f0-872d-fe663a6cb8d8 req-3cccb20a-cf73-46cb-868a-79d0ba43a4b8 service nova] [instance: 78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c] Received event network-changed-613b8631-b2ef-4da4-8e79-67f2fda08ab5 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 715.357146] env[63345]: DEBUG nova.compute.manager [req-865abfd4-11b3-42f0-872d-fe663a6cb8d8 req-3cccb20a-cf73-46cb-868a-79d0ba43a4b8 service nova] [instance: 78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c] Refreshing instance network info cache due to event network-changed-613b8631-b2ef-4da4-8e79-67f2fda08ab5. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 715.357146] env[63345]: DEBUG oslo_concurrency.lockutils [req-865abfd4-11b3-42f0-872d-fe663a6cb8d8 req-3cccb20a-cf73-46cb-868a-79d0ba43a4b8 service nova] Acquiring lock "refresh_cache-78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 715.357146] env[63345]: DEBUG oslo_concurrency.lockutils [req-865abfd4-11b3-42f0-872d-fe663a6cb8d8 req-3cccb20a-cf73-46cb-868a-79d0ba43a4b8 service nova] Acquired lock "refresh_cache-78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 715.357146] env[63345]: DEBUG nova.network.neutron [req-865abfd4-11b3-42f0-872d-fe663a6cb8d8 req-3cccb20a-cf73-46cb-868a-79d0ba43a4b8 service nova] [instance: 78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c] Refreshing network info cache for port 613b8631-b2ef-4da4-8e79-67f2fda08ab5 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 715.373683] env[63345]: DEBUG nova.compute.manager [None req-2672da66-43f2-4cdb-9d1e-327963356641 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] [instance: 30755716-03a7-41bd-90c2-7ef21baf9975] Start destroying the instance on the hypervisor. {{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 715.373916] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-2672da66-43f2-4cdb-9d1e-327963356641 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] [instance: 30755716-03a7-41bd-90c2-7ef21baf9975] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 715.374990] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fc739ce-160e-4526-9772-fd0810acd536 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.390410] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016859, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.392964] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-2672da66-43f2-4cdb-9d1e-327963356641 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] [instance: 30755716-03a7-41bd-90c2-7ef21baf9975] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 715.393297] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6007e311-410c-49b8-af21-1284b806d2c5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.399545] env[63345]: DEBUG oslo_vmware.api [None req-2672da66-43f2-4cdb-9d1e-327963356641 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Waiting for the task: (returnval){ [ 715.399545] env[63345]: value = "task-1016861" [ 715.399545] env[63345]: _type = "Task" [ 715.399545] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.410053] env[63345]: DEBUG oslo_vmware.api [None req-2672da66-43f2-4cdb-9d1e-327963356641 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Task: {'id': task-1016861, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.612069] env[63345]: DEBUG oslo_concurrency.lockutils [None req-06a9855c-2cb2-427e-89b8-67a8824663d4 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Lock "b4a7d6dd-98dc-49d8-b344-1878cd5a3f51" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.744s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 715.688528] env[63345]: DEBUG nova.network.neutron [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 34e0234c-36c4-4878-979b-46f045bd1785] Successfully updated port: 11bc09f4-1dfe-4b4b-8647-e126d27ae4b4 {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 715.787899] env[63345]: DEBUG oslo_vmware.api [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Task: {'id': task-1016860, 'name': Rename_Task, 'duration_secs': 0.196002} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.788228] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] [instance: abc81fa5-78a9-48b1-a49e-2faffddf2411] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 715.788517] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b7cfaf7f-1c9d-45d5-958f-910de79dd945 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.805191] env[63345]: DEBUG oslo_vmware.api [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Waiting for the task: (returnval){ [ 715.805191] env[63345]: value = "task-1016862" [ 715.805191] env[63345]: _type = "Task" [ 715.805191] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.820940] env[63345]: DEBUG oslo_vmware.api [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Task: {'id': task-1016862, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.846142] env[63345]: DEBUG oslo_concurrency.lockutils [None req-225ecea1-6155-4a8e-a2fe-3ff0e2f8886e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lock "869f8110-6490-4a47-955a-0ce085f826af" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 129.187s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 715.891788] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016859, 'name': CreateVM_Task, 'duration_secs': 0.530124} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.892057] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 715.896107] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 715.896645] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 715.896645] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 715.896786] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f33af8d6-091d-42b1-93cc-b64fb1c4379c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.911275] env[63345]: DEBUG oslo_vmware.api [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Waiting for the task: (returnval){ [ 715.911275] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52b08270-14c1-5cfb-15cf-d4bbeb7a24d0" [ 715.911275] env[63345]: _type = "Task" [ 715.911275] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.921602] env[63345]: DEBUG oslo_vmware.api [None req-2672da66-43f2-4cdb-9d1e-327963356641 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Task: {'id': task-1016861, 'name': PowerOffVM_Task, 'duration_secs': 0.223255} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.923054] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-2672da66-43f2-4cdb-9d1e-327963356641 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] [instance: 30755716-03a7-41bd-90c2-7ef21baf9975] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 715.923333] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-2672da66-43f2-4cdb-9d1e-327963356641 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] [instance: 30755716-03a7-41bd-90c2-7ef21baf9975] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 715.923600] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6d40f309-7dcd-4ebf-bf47-eaff2c74d310 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.928624] env[63345]: DEBUG oslo_vmware.api [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52b08270-14c1-5cfb-15cf-d4bbeb7a24d0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.001661] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-2672da66-43f2-4cdb-9d1e-327963356641 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] [instance: 30755716-03a7-41bd-90c2-7ef21baf9975] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 716.001970] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-2672da66-43f2-4cdb-9d1e-327963356641 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] [instance: 30755716-03a7-41bd-90c2-7ef21baf9975] Deleting contents of the VM from datastore datastore1 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 716.002202] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-2672da66-43f2-4cdb-9d1e-327963356641 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Deleting the datastore file [datastore1] 30755716-03a7-41bd-90c2-7ef21baf9975 {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 716.002577] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1ec2b3cd-4221-4943-8263-4c14d2c574ad {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.009360] env[63345]: DEBUG oslo_vmware.api [None req-2672da66-43f2-4cdb-9d1e-327963356641 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Waiting for the task: (returnval){ [ 716.009360] env[63345]: value = "task-1016864" [ 716.009360] env[63345]: _type = "Task" [ 716.009360] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.020448] env[63345]: DEBUG oslo_vmware.api [None req-2672da66-43f2-4cdb-9d1e-327963356641 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Task: {'id': task-1016864, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.100821] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab0cc6ee-6f85-4605-b008-a20a1c6b64da {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.110274] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dee38bec-2112-40fe-97be-afb051842e62 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.144794] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b56fc63-e459-4257-9681-42c197c7a28a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.152506] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-525c022b-d7c7-4358-9cb0-9622865ee0f5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.167210] env[63345]: DEBUG nova.compute.provider_tree [None req-0fd28a91-2a8f-436d-8d59-05d4831a8f65 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 716.193497] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Acquiring lock "refresh_cache-34e0234c-36c4-4878-979b-46f045bd1785" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 716.193676] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Acquired lock "refresh_cache-34e0234c-36c4-4878-979b-46f045bd1785" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 716.193845] env[63345]: DEBUG nova.network.neutron [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 34e0234c-36c4-4878-979b-46f045bd1785] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 716.323503] env[63345]: DEBUG oslo_vmware.api [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Task: {'id': task-1016862, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.349155] env[63345]: DEBUG nova.compute.manager [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 6cbe136b-5bf6-4f17-bcef-b712d850615f] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 716.423784] env[63345]: DEBUG oslo_vmware.api [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52b08270-14c1-5cfb-15cf-d4bbeb7a24d0, 'name': SearchDatastore_Task, 'duration_secs': 0.021332} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.424165] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 716.424454] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 716.424683] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 716.424848] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 716.425039] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 716.425308] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-792250a8-f722-4b6a-b22a-9d1d2e26dd42 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.435231] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 
716.435373] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 716.437010] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-35185614-a2dc-4729-bec5-46d14d43533d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.444499] env[63345]: DEBUG oslo_vmware.api [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Waiting for the task: (returnval){ [ 716.444499] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]5206b92c-eaa4-6f3f-9ccf-7089fdf1c6bf" [ 716.444499] env[63345]: _type = "Task" [ 716.444499] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.449929] env[63345]: DEBUG nova.network.neutron [req-865abfd4-11b3-42f0-872d-fe663a6cb8d8 req-3cccb20a-cf73-46cb-868a-79d0ba43a4b8 service nova] [instance: 78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c] Updated VIF entry in instance network info cache for port 613b8631-b2ef-4da4-8e79-67f2fda08ab5. {{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 716.450681] env[63345]: DEBUG nova.network.neutron [req-865abfd4-11b3-42f0-872d-fe663a6cb8d8 req-3cccb20a-cf73-46cb-868a-79d0ba43a4b8 service nova] [instance: 78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c] Updating instance_info_cache with network_info: [{"id": "613b8631-b2ef-4da4-8e79-67f2fda08ab5", "address": "fa:16:3e:1e:bf:f1", "network": {"id": "51c855a2-1d6a-480d-8107-11fc171d6b4b", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-48336875-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea42057a148e453d869b4af82bdb21bb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa09e855-8af1-419b-b78d-8ffcc94b1bfb", "external-id": "nsx-vlan-transportzone-901", "segmentation_id": 901, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap613b8631-b2", "ovs_interfaceid": "613b8631-b2ef-4da4-8e79-67f2fda08ab5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 716.458816] env[63345]: DEBUG oslo_vmware.api [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5206b92c-eaa4-6f3f-9ccf-7089fdf1c6bf, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.522925] env[63345]: DEBUG oslo_vmware.api [None req-2672da66-43f2-4cdb-9d1e-327963356641 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Task: {'id': task-1016864, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.338333} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.523265] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-2672da66-43f2-4cdb-9d1e-327963356641 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 716.523698] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-2672da66-43f2-4cdb-9d1e-327963356641 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] [instance: 30755716-03a7-41bd-90c2-7ef21baf9975] Deleted contents of the VM from datastore datastore1 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 716.523698] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-2672da66-43f2-4cdb-9d1e-327963356641 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] [instance: 30755716-03a7-41bd-90c2-7ef21baf9975] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 716.523805] env[63345]: INFO nova.compute.manager [None req-2672da66-43f2-4cdb-9d1e-327963356641 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] [instance: 30755716-03a7-41bd-90c2-7ef21baf9975] Took 1.15 seconds to destroy the instance on the hypervisor. [ 716.524033] env[63345]: DEBUG oslo.service.loopingcall [None req-2672da66-43f2-4cdb-9d1e-327963356641 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 716.524225] env[63345]: DEBUG nova.compute.manager [-] [instance: 30755716-03a7-41bd-90c2-7ef21baf9975] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 716.524318] env[63345]: DEBUG nova.network.neutron [-] [instance: 30755716-03a7-41bd-90c2-7ef21baf9975] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 716.671246] env[63345]: DEBUG nova.scheduler.client.report [None req-0fd28a91-2a8f-436d-8d59-05d4831a8f65 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 716.744799] env[63345]: DEBUG nova.network.neutron [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 34e0234c-36c4-4878-979b-46f045bd1785] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 716.820625] env[63345]: DEBUG oslo_vmware.api [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Task: {'id': task-1016862, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.884372] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 716.960289] env[63345]: DEBUG oslo_concurrency.lockutils [req-865abfd4-11b3-42f0-872d-fe663a6cb8d8 req-3cccb20a-cf73-46cb-868a-79d0ba43a4b8 service nova] Releasing lock "refresh_cache-78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 716.961199] env[63345]: DEBUG oslo_vmware.api [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5206b92c-eaa4-6f3f-9ccf-7089fdf1c6bf, 'name': SearchDatastore_Task, 'duration_secs': 0.011704} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.962509] env[63345]: DEBUG nova.network.neutron [None req-2cac5929-5701-4cd3-b462-ff4ee05a1b55 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Port 8c1bd582-6867-4cba-9522-0e03560fa3f7 binding to destination host cpu-1 is already ACTIVE {{(pid=63345) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3171}} [ 716.962839] env[63345]: DEBUG oslo_concurrency.lockutils [None req-2cac5929-5701-4cd3-b462-ff4ee05a1b55 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Acquiring lock "refresh_cache-27c6dc17-4ded-4fe7-8fba-265eae64fc32" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 716.963083] env[63345]: DEBUG oslo_concurrency.lockutils [None req-2cac5929-5701-4cd3-b462-ff4ee05a1b55 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Acquired lock "refresh_cache-27c6dc17-4ded-4fe7-8fba-265eae64fc32" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 716.963332] env[63345]: DEBUG nova.network.neutron [None req-2cac5929-5701-4cd3-b462-ff4ee05a1b55 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 716.965432] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2632b4a1-36ca-458e-a647-6ea417af8d69 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.975025] env[63345]: DEBUG oslo_vmware.api [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Waiting for the task: (returnval){ [ 716.975025] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52a760bf-d83b-e3b6-525d-384c4738b895" [ 716.975025] env[63345]: _type = "Task" [ 716.975025] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.983804] env[63345]: DEBUG oslo_vmware.api [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52a760bf-d83b-e3b6-525d-384c4738b895, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.103956] env[63345]: DEBUG nova.network.neutron [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 34e0234c-36c4-4878-979b-46f045bd1785] Updating instance_info_cache with network_info: [{"id": "11bc09f4-1dfe-4b4b-8647-e126d27ae4b4", "address": "fa:16:3e:78:d9:45", "network": {"id": "51c855a2-1d6a-480d-8107-11fc171d6b4b", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-48336875-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea42057a148e453d869b4af82bdb21bb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa09e855-8af1-419b-b78d-8ffcc94b1bfb", "external-id": "nsx-vlan-transportzone-901", "segmentation_id": 901, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap11bc09f4-1d", "ovs_interfaceid": "11bc09f4-1dfe-4b4b-8647-e126d27ae4b4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 717.176661] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0fd28a91-2a8f-436d-8d59-05d4831a8f65 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.107s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 717.180480] env[63345]: DEBUG oslo_concurrency.lockutils [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.610s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 717.186825] env[63345]: INFO nova.compute.claims [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 717.225963] env[63345]: INFO nova.scheduler.client.report [None req-0fd28a91-2a8f-436d-8d59-05d4831a8f65 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Deleted allocations for instance 28caa5f5-141a-4ef9-abb3-33a1973d99cf [ 717.325883] env[63345]: DEBUG oslo_vmware.api [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Task: {'id': task-1016862, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.409309] env[63345]: DEBUG oslo_concurrency.lockutils [None req-59c03649-375f-46fa-872a-0959a487545e tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Acquiring lock "0d5cb238-2d25-47b1-8ce6-15a20836dbfb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 717.409570] env[63345]: DEBUG oslo_concurrency.lockutils [None req-59c03649-375f-46fa-872a-0959a487545e tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Lock "0d5cb238-2d25-47b1-8ce6-15a20836dbfb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 717.409775] env[63345]: DEBUG oslo_concurrency.lockutils [None req-59c03649-375f-46fa-872a-0959a487545e tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Acquiring lock "0d5cb238-2d25-47b1-8ce6-15a20836dbfb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 717.409960] env[63345]: DEBUG oslo_concurrency.lockutils [None req-59c03649-375f-46fa-872a-0959a487545e tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Lock "0d5cb238-2d25-47b1-8ce6-15a20836dbfb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 717.410190] env[63345]: DEBUG oslo_concurrency.lockutils [None req-59c03649-375f-46fa-872a-0959a487545e tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Lock "0d5cb238-2d25-47b1-8ce6-15a20836dbfb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 717.414262] env[63345]: INFO nova.compute.manager [None req-59c03649-375f-46fa-872a-0959a487545e tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 0d5cb238-2d25-47b1-8ce6-15a20836dbfb] Terminating instance [ 717.490020] env[63345]: DEBUG oslo_vmware.api [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52a760bf-d83b-e3b6-525d-384c4738b895, 'name': SearchDatastore_Task, 'duration_secs': 0.012059} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 717.490020] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 717.490020] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c/78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 717.490020] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-77fb342c-338f-4376-a813-5d67b79b1b28 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.496983] env[63345]: DEBUG oslo_vmware.api [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Waiting for the task: (returnval){ [ 717.496983] env[63345]: value = "task-1016865" [ 717.496983] env[63345]: _type = "Task" [ 717.496983] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 717.506227] env[63345]: DEBUG oslo_vmware.api [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Task: {'id': task-1016865, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.561689] env[63345]: DEBUG nova.network.neutron [-] [instance: 30755716-03a7-41bd-90c2-7ef21baf9975] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 717.607173] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Releasing lock "refresh_cache-34e0234c-36c4-4878-979b-46f045bd1785" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 717.607566] env[63345]: DEBUG nova.compute.manager [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 34e0234c-36c4-4878-979b-46f045bd1785] Instance network_info: |[{"id": "11bc09f4-1dfe-4b4b-8647-e126d27ae4b4", "address": "fa:16:3e:78:d9:45", "network": {"id": "51c855a2-1d6a-480d-8107-11fc171d6b4b", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-48336875-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea42057a148e453d869b4af82bdb21bb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa09e855-8af1-419b-b78d-8ffcc94b1bfb", "external-id": "nsx-vlan-transportzone-901", "segmentation_id": 901, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap11bc09f4-1d", "ovs_interfaceid": "11bc09f4-1dfe-4b4b-8647-e126d27ae4b4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 717.608040] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 34e0234c-36c4-4878-979b-46f045bd1785] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:78:d9:45', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aa09e855-8af1-419b-b78d-8ffcc94b1bfb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '11bc09f4-1dfe-4b4b-8647-e126d27ae4b4', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 717.615497] env[63345]: DEBUG oslo.service.loopingcall [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 717.618645] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 34e0234c-36c4-4878-979b-46f045bd1785] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 717.619971] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b650f854-9c77-4d35-9dc1-151d2c9a8fd1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.636095] env[63345]: DEBUG nova.compute.manager [req-ffdabe9f-33a5-4c08-bf3c-a8ccbba4db62 req-7c50ade4-3be0-48d1-b95e-a9a4d401d5d7 service nova] [instance: 34e0234c-36c4-4878-979b-46f045bd1785] Received event network-vif-plugged-11bc09f4-1dfe-4b4b-8647-e126d27ae4b4 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 717.636317] env[63345]: DEBUG oslo_concurrency.lockutils [req-ffdabe9f-33a5-4c08-bf3c-a8ccbba4db62 req-7c50ade4-3be0-48d1-b95e-a9a4d401d5d7 service nova] Acquiring lock "34e0234c-36c4-4878-979b-46f045bd1785-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 717.636729] env[63345]: DEBUG oslo_concurrency.lockutils [req-ffdabe9f-33a5-4c08-bf3c-a8ccbba4db62 req-7c50ade4-3be0-48d1-b95e-a9a4d401d5d7 service nova] Lock "34e0234c-36c4-4878-979b-46f045bd1785-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 717.636910] env[63345]: DEBUG oslo_concurrency.lockutils [req-ffdabe9f-33a5-4c08-bf3c-a8ccbba4db62 req-7c50ade4-3be0-48d1-b95e-a9a4d401d5d7 service nova] Lock "34e0234c-36c4-4878-979b-46f045bd1785-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 717.637092] env[63345]: DEBUG nova.compute.manager [req-ffdabe9f-33a5-4c08-bf3c-a8ccbba4db62 req-7c50ade4-3be0-48d1-b95e-a9a4d401d5d7 service nova] [instance: 34e0234c-36c4-4878-979b-46f045bd1785] No waiting events found dispatching network-vif-plugged-11bc09f4-1dfe-4b4b-8647-e126d27ae4b4 {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 717.637292] env[63345]: WARNING nova.compute.manager [req-ffdabe9f-33a5-4c08-bf3c-a8ccbba4db62 req-7c50ade4-3be0-48d1-b95e-a9a4d401d5d7 service nova] [instance: 34e0234c-36c4-4878-979b-46f045bd1785] Received unexpected event network-vif-plugged-11bc09f4-1dfe-4b4b-8647-e126d27ae4b4 for instance with vm_state building and task_state spawning. [ 717.637517] env[63345]: DEBUG nova.compute.manager [req-ffdabe9f-33a5-4c08-bf3c-a8ccbba4db62 req-7c50ade4-3be0-48d1-b95e-a9a4d401d5d7 service nova] [instance: 34e0234c-36c4-4878-979b-46f045bd1785] Received event network-changed-11bc09f4-1dfe-4b4b-8647-e126d27ae4b4 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 717.637632] env[63345]: DEBUG nova.compute.manager [req-ffdabe9f-33a5-4c08-bf3c-a8ccbba4db62 req-7c50ade4-3be0-48d1-b95e-a9a4d401d5d7 service nova] [instance: 34e0234c-36c4-4878-979b-46f045bd1785] Refreshing instance network info cache due to event network-changed-11bc09f4-1dfe-4b4b-8647-e126d27ae4b4. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 717.637803] env[63345]: DEBUG oslo_concurrency.lockutils [req-ffdabe9f-33a5-4c08-bf3c-a8ccbba4db62 req-7c50ade4-3be0-48d1-b95e-a9a4d401d5d7 service nova] Acquiring lock "refresh_cache-34e0234c-36c4-4878-979b-46f045bd1785" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 717.637933] env[63345]: DEBUG oslo_concurrency.lockutils [req-ffdabe9f-33a5-4c08-bf3c-a8ccbba4db62 req-7c50ade4-3be0-48d1-b95e-a9a4d401d5d7 service nova] Acquired lock "refresh_cache-34e0234c-36c4-4878-979b-46f045bd1785" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 717.638098] env[63345]: DEBUG nova.network.neutron [req-ffdabe9f-33a5-4c08-bf3c-a8ccbba4db62 req-7c50ade4-3be0-48d1-b95e-a9a4d401d5d7 service nova] [instance: 34e0234c-36c4-4878-979b-46f045bd1785] Refreshing network info cache for port 11bc09f4-1dfe-4b4b-8647-e126d27ae4b4 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 717.646600] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 717.646600] env[63345]: value = "task-1016866" [ 717.646600] env[63345]: _type = "Task" [ 717.646600] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 717.656481] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016866, 'name': CreateVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.740083] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0fd28a91-2a8f-436d-8d59-05d4831a8f65 tempest-ListServersNegativeTestJSON-726124101 tempest-ListServersNegativeTestJSON-726124101-project-member] Lock "28caa5f5-141a-4ef9-abb3-33a1973d99cf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.693s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 717.828760] env[63345]: DEBUG oslo_vmware.api [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Task: {'id': task-1016862, 'name': PowerOnVM_Task, 'duration_secs': 2.00342} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 717.831794] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] [instance: abc81fa5-78a9-48b1-a49e-2faffddf2411] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 717.832062] env[63345]: INFO nova.compute.manager [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] [instance: abc81fa5-78a9-48b1-a49e-2faffddf2411] Took 10.43 seconds to spawn the instance on the hypervisor. 
[ 717.832245] env[63345]: DEBUG nova.compute.manager [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] [instance: abc81fa5-78a9-48b1-a49e-2faffddf2411] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 717.834325] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59c9771f-54c2-4726-ac3d-bb7d0d44ff90 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.919100] env[63345]: DEBUG nova.compute.manager [None req-59c03649-375f-46fa-872a-0959a487545e tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 0d5cb238-2d25-47b1-8ce6-15a20836dbfb] Start destroying the instance on the hypervisor. {{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 717.919937] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-59c03649-375f-46fa-872a-0959a487545e tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 0d5cb238-2d25-47b1-8ce6-15a20836dbfb] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 717.923021] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cb7522e-a5bb-4b5a-a3d3-a1ccfcfcf0dd {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.931507] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-59c03649-375f-46fa-872a-0959a487545e tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 0d5cb238-2d25-47b1-8ce6-15a20836dbfb] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 717.933134] env[63345]: DEBUG nova.network.neutron [None req-2cac5929-5701-4cd3-b462-ff4ee05a1b55 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Updating instance_info_cache with network_info: [{"id": "8c1bd582-6867-4cba-9522-0e03560fa3f7", "address": "fa:16:3e:3d:4f:aa", "network": {"id": "18285fd9-d154-415c-acbb-1494303e3b6c", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "5dc99cc64e6c4d83928b309253a8df8d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8b99a46-3e7f-4ef1-9e45-58e6cd17f210", "external-id": "nsx-vlan-transportzone-704", "segmentation_id": 704, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c1bd582-68", "ovs_interfaceid": "8c1bd582-6867-4cba-9522-0e03560fa3f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 717.937018] env[63345]: DEBUG 
oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f4b5b44d-7e41-442b-9797-f81fde451544 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.945638] env[63345]: DEBUG oslo_vmware.api [None req-59c03649-375f-46fa-872a-0959a487545e tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Waiting for the task: (returnval){ [ 717.945638] env[63345]: value = "task-1016867" [ 717.945638] env[63345]: _type = "Task" [ 717.945638] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 717.956497] env[63345]: DEBUG oslo_vmware.api [None req-59c03649-375f-46fa-872a-0959a487545e tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Task: {'id': task-1016867, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.008926] env[63345]: DEBUG oslo_vmware.api [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Task: {'id': task-1016865, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.063833] env[63345]: INFO nova.compute.manager [-] [instance: 30755716-03a7-41bd-90c2-7ef21baf9975] Took 1.54 seconds to deallocate network for instance. [ 718.156754] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016866, 'name': CreateVM_Task} progress is 25%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.360407] env[63345]: INFO nova.compute.manager [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] [instance: abc81fa5-78a9-48b1-a49e-2faffddf2411] Took 47.73 seconds to build instance. [ 718.440348] env[63345]: DEBUG oslo_concurrency.lockutils [None req-2cac5929-5701-4cd3-b462-ff4ee05a1b55 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Releasing lock "refresh_cache-27c6dc17-4ded-4fe7-8fba-265eae64fc32" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 718.458761] env[63345]: DEBUG oslo_vmware.api [None req-59c03649-375f-46fa-872a-0959a487545e tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Task: {'id': task-1016867, 'name': PowerOffVM_Task, 'duration_secs': 0.364228} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.460668] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-59c03649-375f-46fa-872a-0959a487545e tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 0d5cb238-2d25-47b1-8ce6-15a20836dbfb] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 718.460668] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-59c03649-375f-46fa-872a-0959a487545e tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 0d5cb238-2d25-47b1-8ce6-15a20836dbfb] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 718.460668] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a84b9da2-05eb-4081-a58f-171ee0ea1d2e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.506998] env[63345]: DEBUG oslo_vmware.api [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Task: {'id': task-1016865, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.663399} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.510426] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c/78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 718.510648] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 718.511429] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b2131407-1073-4dc2-a842-c05d0d5bca64 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.520881] env[63345]: DEBUG oslo_vmware.api [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Waiting for the task: (returnval){ [ 718.520881] env[63345]: value = "task-1016869" [ 718.520881] env[63345]: _type = "Task" [ 718.520881] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.530377] env[63345]: DEBUG oslo_vmware.api [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Task: {'id': task-1016869, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.551536] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-59c03649-375f-46fa-872a-0959a487545e tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 0d5cb238-2d25-47b1-8ce6-15a20836dbfb] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 718.552027] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-59c03649-375f-46fa-872a-0959a487545e tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 0d5cb238-2d25-47b1-8ce6-15a20836dbfb] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 718.552027] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-59c03649-375f-46fa-872a-0959a487545e tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Deleting the datastore file [datastore2] 0d5cb238-2d25-47b1-8ce6-15a20836dbfb {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 718.552366] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9667b2e4-816a-47e2-8d14-62e6e2bcd20f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.572211] env[63345]: DEBUG oslo_vmware.api [None req-59c03649-375f-46fa-872a-0959a487545e tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Waiting for the task: (returnval){ [ 718.572211] env[63345]: value = "task-1016870" [ 718.572211] env[63345]: _type = "Task" [ 718.572211] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.574540] env[63345]: DEBUG oslo_concurrency.lockutils [None req-2672da66-43f2-4cdb-9d1e-327963356641 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 718.583098] env[63345]: DEBUG oslo_vmware.api [None req-59c03649-375f-46fa-872a-0959a487545e tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Task: {'id': task-1016870, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.661578] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016866, 'name': CreateVM_Task} progress is 99%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.704781] env[63345]: DEBUG nova.network.neutron [req-ffdabe9f-33a5-4c08-bf3c-a8ccbba4db62 req-7c50ade4-3be0-48d1-b95e-a9a4d401d5d7 service nova] [instance: 34e0234c-36c4-4878-979b-46f045bd1785] Updated VIF entry in instance network info cache for port 11bc09f4-1dfe-4b4b-8647-e126d27ae4b4. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 718.705151] env[63345]: DEBUG nova.network.neutron [req-ffdabe9f-33a5-4c08-bf3c-a8ccbba4db62 req-7c50ade4-3be0-48d1-b95e-a9a4d401d5d7 service nova] [instance: 34e0234c-36c4-4878-979b-46f045bd1785] Updating instance_info_cache with network_info: [{"id": "11bc09f4-1dfe-4b4b-8647-e126d27ae4b4", "address": "fa:16:3e:78:d9:45", "network": {"id": "51c855a2-1d6a-480d-8107-11fc171d6b4b", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-48336875-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea42057a148e453d869b4af82bdb21bb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa09e855-8af1-419b-b78d-8ffcc94b1bfb", "external-id": "nsx-vlan-transportzone-901", "segmentation_id": 901, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap11bc09f4-1d", "ovs_interfaceid": "11bc09f4-1dfe-4b4b-8647-e126d27ae4b4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 718.731565] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce908e10-4d5f-43c1-b3cb-7fe73c792871 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.740101] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ca7fa21-207b-49df-9cea-0db63cd25328 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.776479] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20e37235-ed8b-491f-b9a7-02973f5aba89 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.783364] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7321a08e-f7b6-4b73-8c0e-6a8b6bf6ffdd {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.800494] env[63345]: DEBUG nova.compute.provider_tree [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 718.864302] env[63345]: DEBUG oslo_concurrency.lockutils [None req-03913068-1684-4077-913f-1cd9fa8c8ae9 tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Lock "abc81fa5-78a9-48b1-a49e-2faffddf2411" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 129.182s {{(pid=63345) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 718.946170] env[63345]: DEBUG nova.compute.manager [None req-2cac5929-5701-4cd3-b462-ff4ee05a1b55 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=63345) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:900}} [ 718.946415] env[63345]: DEBUG oslo_concurrency.lockutils [None req-2cac5929-5701-4cd3-b462-ff4ee05a1b55 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 719.032826] env[63345]: DEBUG oslo_vmware.api [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Task: {'id': task-1016869, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068896} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.033239] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 719.034055] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4273e229-c4e3-4dd3-b356-4e7c603c3813 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.056661] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c] Reconfiguring VM instance instance-0000002c to attach disk [datastore2] 78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c/78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 719.056909] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8274ff36-2ac6-42ae-bfae-002b3e353ee4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.080742] env[63345]: DEBUG oslo_vmware.api [None req-59c03649-375f-46fa-872a-0959a487545e tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Task: {'id': task-1016870, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.082178] env[63345]: DEBUG oslo_vmware.api [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Waiting for the task: (returnval){ [ 719.082178] env[63345]: value = "task-1016871" [ 719.082178] env[63345]: _type = "Task" [ 719.082178] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.091787] env[63345]: DEBUG oslo_vmware.api [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Task: {'id': task-1016871, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.095605] env[63345]: DEBUG oslo_concurrency.lockutils [None req-20fee683-8a2f-46a5-b495-3d50cf4c60ce tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Acquiring lock "abc81fa5-78a9-48b1-a49e-2faffddf2411" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 719.095912] env[63345]: DEBUG oslo_concurrency.lockutils [None req-20fee683-8a2f-46a5-b495-3d50cf4c60ce tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Lock "abc81fa5-78a9-48b1-a49e-2faffddf2411" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 719.096088] env[63345]: DEBUG oslo_concurrency.lockutils [None req-20fee683-8a2f-46a5-b495-3d50cf4c60ce tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Acquiring lock "abc81fa5-78a9-48b1-a49e-2faffddf2411-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 719.096274] env[63345]: DEBUG oslo_concurrency.lockutils [None req-20fee683-8a2f-46a5-b495-3d50cf4c60ce tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Lock "abc81fa5-78a9-48b1-a49e-2faffddf2411-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 719.096442] env[63345]: DEBUG oslo_concurrency.lockutils [None req-20fee683-8a2f-46a5-b495-3d50cf4c60ce tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Lock "abc81fa5-78a9-48b1-a49e-2faffddf2411-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 719.100784] env[63345]: INFO nova.compute.manager [None req-20fee683-8a2f-46a5-b495-3d50cf4c60ce tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] [instance: abc81fa5-78a9-48b1-a49e-2faffddf2411] Terminating instance [ 719.160083] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016866, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.208455] env[63345]: DEBUG oslo_concurrency.lockutils [req-ffdabe9f-33a5-4c08-bf3c-a8ccbba4db62 req-7c50ade4-3be0-48d1-b95e-a9a4d401d5d7 service nova] Releasing lock "refresh_cache-34e0234c-36c4-4878-979b-46f045bd1785" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 719.208750] env[63345]: DEBUG nova.compute.manager [req-ffdabe9f-33a5-4c08-bf3c-a8ccbba4db62 req-7c50ade4-3be0-48d1-b95e-a9a4d401d5d7 service nova] [instance: 869f8110-6490-4a47-955a-0ce085f826af] Received event network-changed-9b0555db-b627-44ae-8812-42415d554cde {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 719.208936] env[63345]: DEBUG nova.compute.manager [req-ffdabe9f-33a5-4c08-bf3c-a8ccbba4db62 req-7c50ade4-3be0-48d1-b95e-a9a4d401d5d7 service nova] [instance: 869f8110-6490-4a47-955a-0ce085f826af] Refreshing instance network info cache due to event network-changed-9b0555db-b627-44ae-8812-42415d554cde. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 719.209295] env[63345]: DEBUG oslo_concurrency.lockutils [req-ffdabe9f-33a5-4c08-bf3c-a8ccbba4db62 req-7c50ade4-3be0-48d1-b95e-a9a4d401d5d7 service nova] Acquiring lock "refresh_cache-869f8110-6490-4a47-955a-0ce085f826af" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 719.209420] env[63345]: DEBUG oslo_concurrency.lockutils [req-ffdabe9f-33a5-4c08-bf3c-a8ccbba4db62 req-7c50ade4-3be0-48d1-b95e-a9a4d401d5d7 service nova] Acquired lock "refresh_cache-869f8110-6490-4a47-955a-0ce085f826af" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 719.209570] env[63345]: DEBUG nova.network.neutron [req-ffdabe9f-33a5-4c08-bf3c-a8ccbba4db62 req-7c50ade4-3be0-48d1-b95e-a9a4d401d5d7 service nova] [instance: 869f8110-6490-4a47-955a-0ce085f826af] Refreshing network info cache for port 9b0555db-b627-44ae-8812-42415d554cde {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 719.304524] env[63345]: DEBUG nova.scheduler.client.report [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 719.367133] env[63345]: DEBUG nova.compute.manager [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 778faa4f-4c5f-4ec2-b17b-5d7513c9c218] Starting instance... 
{{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 719.583102] env[63345]: DEBUG oslo_vmware.api [None req-59c03649-375f-46fa-872a-0959a487545e tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Task: {'id': task-1016870, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.606016} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.586013] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-59c03649-375f-46fa-872a-0959a487545e tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 719.586307] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-59c03649-375f-46fa-872a-0959a487545e tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 0d5cb238-2d25-47b1-8ce6-15a20836dbfb] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 719.586526] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-59c03649-375f-46fa-872a-0959a487545e tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 0d5cb238-2d25-47b1-8ce6-15a20836dbfb] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 719.586699] env[63345]: INFO nova.compute.manager [None req-59c03649-375f-46fa-872a-0959a487545e tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 0d5cb238-2d25-47b1-8ce6-15a20836dbfb] Took 1.67 seconds to destroy the instance on the hypervisor. [ 719.586970] env[63345]: DEBUG oslo.service.loopingcall [None req-59c03649-375f-46fa-872a-0959a487545e tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 719.587223] env[63345]: DEBUG nova.compute.manager [-] [instance: 0d5cb238-2d25-47b1-8ce6-15a20836dbfb] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 719.587370] env[63345]: DEBUG nova.network.neutron [-] [instance: 0d5cb238-2d25-47b1-8ce6-15a20836dbfb] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 719.594553] env[63345]: DEBUG oslo_vmware.api [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Task: {'id': task-1016871, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.604322] env[63345]: DEBUG nova.compute.manager [None req-20fee683-8a2f-46a5-b495-3d50cf4c60ce tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] [instance: abc81fa5-78a9-48b1-a49e-2faffddf2411] Start destroying the instance on the hypervisor. 
{{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 719.604528] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-20fee683-8a2f-46a5-b495-3d50cf4c60ce tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] [instance: abc81fa5-78a9-48b1-a49e-2faffddf2411] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 719.605362] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4797770c-f670-4bc9-ac09-eb72f485c933 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.613238] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-20fee683-8a2f-46a5-b495-3d50cf4c60ce tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] [instance: abc81fa5-78a9-48b1-a49e-2faffddf2411] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 719.614074] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-684b9f8d-eee6-4877-8621-fb877b586d43 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.621245] env[63345]: DEBUG oslo_vmware.api [None req-20fee683-8a2f-46a5-b495-3d50cf4c60ce tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Waiting for the task: (returnval){ [ 719.621245] env[63345]: value = "task-1016872" [ 719.621245] env[63345]: _type = "Task" [ 719.621245] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.631258] env[63345]: DEBUG oslo_vmware.api [None req-20fee683-8a2f-46a5-b495-3d50cf4c60ce tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Task: {'id': task-1016872, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.659725] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016866, 'name': CreateVM_Task, 'duration_secs': 1.667021} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.659925] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 34e0234c-36c4-4878-979b-46f045bd1785] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 719.660944] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 719.661148] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 719.661471] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 719.661730] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-242902a5-faca-4f31-b599-521e11ceb946 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.667086] env[63345]: DEBUG oslo_vmware.api [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Waiting for the task: (returnval){ [ 719.667086] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52752e57-cb66-33e4-c91a-f5abb6e0d904" [ 719.667086] env[63345]: _type = "Task" [ 719.667086] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.675457] env[63345]: DEBUG oslo_vmware.api [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52752e57-cb66-33e4-c91a-f5abb6e0d904, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.816176] env[63345]: DEBUG oslo_concurrency.lockutils [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.633s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 719.816176] env[63345]: DEBUG nova.compute.manager [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] Start building networks asynchronously for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 719.817572] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.375s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 719.819504] env[63345]: INFO nova.compute.claims [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 3101726f-5b14-417e-bcf8-390ce1f9b467] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 719.905803] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 720.095415] env[63345]: DEBUG oslo_vmware.api [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Task: {'id': task-1016871, 'name': ReconfigVM_Task, 'duration_secs': 0.831152} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.099022] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c] Reconfigured VM instance instance-0000002c to attach disk [datastore2] 78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c/78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 720.099022] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7f03e0e5-533f-4a96-a153-74a9591ffa5b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.104031] env[63345]: DEBUG oslo_vmware.api [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Waiting for the task: (returnval){ [ 720.104031] env[63345]: value = "task-1016873" [ 720.104031] env[63345]: _type = "Task" [ 720.104031] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.104465] env[63345]: DEBUG nova.network.neutron [req-ffdabe9f-33a5-4c08-bf3c-a8ccbba4db62 req-7c50ade4-3be0-48d1-b95e-a9a4d401d5d7 service nova] [instance: 869f8110-6490-4a47-955a-0ce085f826af] Updated VIF entry in instance network info cache for port 9b0555db-b627-44ae-8812-42415d554cde. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 720.104800] env[63345]: DEBUG nova.network.neutron [req-ffdabe9f-33a5-4c08-bf3c-a8ccbba4db62 req-7c50ade4-3be0-48d1-b95e-a9a4d401d5d7 service nova] [instance: 869f8110-6490-4a47-955a-0ce085f826af] Updating instance_info_cache with network_info: [{"id": "9b0555db-b627-44ae-8812-42415d554cde", "address": "fa:16:3e:4c:4f:70", "network": {"id": "18b67684-3f06-4f15-be40-ba0b2769b248", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1680877425-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.200", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cb91ecf5d00e48dea9baf2122ac4fed7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "68add7d6-c025-46fa-84d3-9c589adb63e4", "external-id": "nsx-vlan-transportzone-961", "segmentation_id": 961, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9b0555db-b6", "ovs_interfaceid": "9b0555db-b627-44ae-8812-42415d554cde", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 720.115728] env[63345]: DEBUG oslo_vmware.api [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Task: {'id': task-1016873, 'name': Rename_Task} progress is 6%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.140625] env[63345]: DEBUG oslo_vmware.api [None req-20fee683-8a2f-46a5-b495-3d50cf4c60ce tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Task: {'id': task-1016872, 'name': PowerOffVM_Task, 'duration_secs': 0.260635} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.140625] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-20fee683-8a2f-46a5-b495-3d50cf4c60ce tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] [instance: abc81fa5-78a9-48b1-a49e-2faffddf2411] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 720.140625] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-20fee683-8a2f-46a5-b495-3d50cf4c60ce tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] [instance: abc81fa5-78a9-48b1-a49e-2faffddf2411] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 720.140625] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6393bcb7-85b6-4ce1-8da1-1c485d8b67c4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.178385] env[63345]: DEBUG oslo_vmware.api [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52752e57-cb66-33e4-c91a-f5abb6e0d904, 'name': SearchDatastore_Task, 'duration_secs': 0.021988} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.178706] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 720.178948] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 34e0234c-36c4-4878-979b-46f045bd1785] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 720.179205] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 720.179354] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 720.179571] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 720.179950] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-83d559fd-84f1-44e8-b912-40d2e82f00ca {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.189372] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 720.189372] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 720.189840] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-be8b1259-477e-4de8-91e8-1671307af52b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.200686] env[63345]: DEBUG oslo_vmware.api [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Waiting for the task: (returnval){ [ 720.200686] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52bd88c5-d274-bfc0-fc2c-94fe48d12f68" [ 720.200686] env[63345]: _type = "Task" [ 720.200686] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.211526] env[63345]: DEBUG oslo_vmware.api [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52bd88c5-d274-bfc0-fc2c-94fe48d12f68, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.223655] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-20fee683-8a2f-46a5-b495-3d50cf4c60ce tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] [instance: abc81fa5-78a9-48b1-a49e-2faffddf2411] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 720.223655] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-20fee683-8a2f-46a5-b495-3d50cf4c60ce tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] [instance: abc81fa5-78a9-48b1-a49e-2faffddf2411] Deleting contents of the VM from datastore datastore1 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 720.223655] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-20fee683-8a2f-46a5-b495-3d50cf4c60ce tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Deleting the datastore file [datastore1] abc81fa5-78a9-48b1-a49e-2faffddf2411 {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 720.223655] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e1cb8c95-0cc6-425c-b5ea-31829e2e7a8f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.231924] env[63345]: DEBUG oslo_vmware.api [None req-20fee683-8a2f-46a5-b495-3d50cf4c60ce tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Waiting for the task: (returnval){ [ 720.231924] env[63345]: value = "task-1016875" [ 720.231924] env[63345]: _type = "Task" [ 720.231924] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.238732] env[63345]: DEBUG nova.compute.manager [req-19cb5a18-0b16-4a8e-b364-72440a624c51 req-4be162ce-86ff-4750-a92d-ed868d16aafd service nova] [instance: 0d5cb238-2d25-47b1-8ce6-15a20836dbfb] Received event network-vif-deleted-a5abe431-00eb-4c22-81e4-d160cc76d360 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 720.238732] env[63345]: INFO nova.compute.manager [req-19cb5a18-0b16-4a8e-b364-72440a624c51 req-4be162ce-86ff-4750-a92d-ed868d16aafd service nova] [instance: 0d5cb238-2d25-47b1-8ce6-15a20836dbfb] Neutron deleted interface a5abe431-00eb-4c22-81e4-d160cc76d360; detaching it from the instance and deleting it from the info cache [ 720.238948] env[63345]: DEBUG nova.network.neutron [req-19cb5a18-0b16-4a8e-b364-72440a624c51 req-4be162ce-86ff-4750-a92d-ed868d16aafd service nova] [instance: 0d5cb238-2d25-47b1-8ce6-15a20836dbfb] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 720.246361] env[63345]: DEBUG oslo_vmware.api [None req-20fee683-8a2f-46a5-b495-3d50cf4c60ce tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Task: {'id': task-1016875, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.325459] env[63345]: DEBUG nova.compute.utils [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 720.334331] env[63345]: DEBUG nova.compute.manager [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] Allocating IP information in the background. {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 720.335179] env[63345]: DEBUG nova.network.neutron [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 720.610950] env[63345]: DEBUG nova.network.neutron [-] [instance: 0d5cb238-2d25-47b1-8ce6-15a20836dbfb] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 720.613159] env[63345]: DEBUG oslo_concurrency.lockutils [req-ffdabe9f-33a5-4c08-bf3c-a8ccbba4db62 req-7c50ade4-3be0-48d1-b95e-a9a4d401d5d7 service nova] Releasing lock "refresh_cache-869f8110-6490-4a47-955a-0ce085f826af" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 720.613159] env[63345]: DEBUG nova.compute.manager [req-ffdabe9f-33a5-4c08-bf3c-a8ccbba4db62 req-7c50ade4-3be0-48d1-b95e-a9a4d401d5d7 service nova] [instance: 30755716-03a7-41bd-90c2-7ef21baf9975] Received event network-vif-deleted-32300854-3281-41f9-8ba4-87a1c457e72c {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 720.626678] env[63345]: DEBUG oslo_vmware.api [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Task: {'id': task-1016873, 'name': Rename_Task, 'duration_secs': 0.152501} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.626906] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 720.628462] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3d12a86d-fa88-4bf6-9398-841e22b0d4c8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.635436] env[63345]: DEBUG oslo_vmware.api [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Waiting for the task: (returnval){ [ 720.635436] env[63345]: value = "task-1016876" [ 720.635436] env[63345]: _type = "Task" [ 720.635436] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.648736] env[63345]: DEBUG oslo_vmware.api [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Task: {'id': task-1016876, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.693566] env[63345]: DEBUG nova.policy [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e36fd04030444217acadbbf4e4fe9be0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '33c28bfca4da460e8ca96dc7519204c8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 720.710980] env[63345]: DEBUG oslo_vmware.api [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52bd88c5-d274-bfc0-fc2c-94fe48d12f68, 'name': SearchDatastore_Task, 'duration_secs': 0.009492} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.717864] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fdf0c753-3504-4af8-bc50-082be45bb88f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.722870] env[63345]: DEBUG oslo_vmware.api [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Waiting for the task: (returnval){ [ 720.722870] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52a31247-0c42-487a-88f6-99652e63dc98" [ 720.722870] env[63345]: _type = "Task" [ 720.722870] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.733067] env[63345]: DEBUG oslo_vmware.api [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52a31247-0c42-487a-88f6-99652e63dc98, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.744059] env[63345]: DEBUG oslo_vmware.api [None req-20fee683-8a2f-46a5-b495-3d50cf4c60ce tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Task: {'id': task-1016875, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.491704} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.745760] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-aa9d61c9-cf77-47c2-a020-acfc5cd5b921 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.747718] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-20fee683-8a2f-46a5-b495-3d50cf4c60ce tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 720.748069] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-20fee683-8a2f-46a5-b495-3d50cf4c60ce tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] [instance: abc81fa5-78a9-48b1-a49e-2faffddf2411] Deleted contents of the VM from datastore datastore1 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 720.748227] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-20fee683-8a2f-46a5-b495-3d50cf4c60ce tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] [instance: abc81fa5-78a9-48b1-a49e-2faffddf2411] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 720.748422] env[63345]: INFO nova.compute.manager [None req-20fee683-8a2f-46a5-b495-3d50cf4c60ce tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] [instance: abc81fa5-78a9-48b1-a49e-2faffddf2411] Took 1.14 seconds to destroy the instance on the hypervisor. [ 720.748654] env[63345]: DEBUG oslo.service.loopingcall [None req-20fee683-8a2f-46a5-b495-3d50cf4c60ce tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 720.749238] env[63345]: DEBUG nova.compute.manager [-] [instance: abc81fa5-78a9-48b1-a49e-2faffddf2411] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 720.749337] env[63345]: DEBUG nova.network.neutron [-] [instance: abc81fa5-78a9-48b1-a49e-2faffddf2411] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 720.757108] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a02c050f-9f7c-442e-aee4-bfecb018c6d7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.796016] env[63345]: DEBUG nova.compute.manager [req-19cb5a18-0b16-4a8e-b364-72440a624c51 req-4be162ce-86ff-4750-a92d-ed868d16aafd service nova] [instance: 0d5cb238-2d25-47b1-8ce6-15a20836dbfb] Detach interface failed, port_id=a5abe431-00eb-4c22-81e4-d160cc76d360, reason: Instance 0d5cb238-2d25-47b1-8ce6-15a20836dbfb could not be found. 
{{(pid=63345) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 720.835380] env[63345]: DEBUG nova.compute.manager [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] Start building block device mappings for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 720.853759] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5f9f0b9-e86d-48e3-95a7-5f99bd1e4b74 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.862905] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-109a168d-fe51-4095-b359-ad0cc9922a8b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.903887] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6383c377-505c-4d42-b480-126bffec0e4f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.912921] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f178929a-37cf-4433-90bd-867634c147e9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.928587] env[63345]: DEBUG nova.compute.provider_tree [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 721.117568] env[63345]: INFO nova.compute.manager [-] [instance: 0d5cb238-2d25-47b1-8ce6-15a20836dbfb] Took 1.53 seconds to deallocate network for instance. [ 721.154698] env[63345]: DEBUG oslo_vmware.api [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Task: {'id': task-1016876, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.234267] env[63345]: DEBUG oslo_vmware.api [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52a31247-0c42-487a-88f6-99652e63dc98, 'name': SearchDatastore_Task, 'duration_secs': 0.023941} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 721.234553] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 721.234553] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 34e0234c-36c4-4878-979b-46f045bd1785/34e0234c-36c4-4878-979b-46f045bd1785.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 721.234832] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8f73db0e-f6fc-4487-9cba-f0e35df091fd {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.241817] env[63345]: DEBUG oslo_vmware.api [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Waiting for the task: (returnval){ [ 721.241817] env[63345]: value = "task-1016877" [ 721.241817] env[63345]: _type = "Task" [ 721.241817] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 721.254853] env[63345]: DEBUG oslo_vmware.api [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Task: {'id': task-1016877, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.318744] env[63345]: DEBUG nova.compute.manager [req-d15f7d93-4cf5-4004-8a3a-ae36e9074e66 req-03641f37-1f5e-4a5c-bb51-1fccedf8c6fb service nova] [instance: abc81fa5-78a9-48b1-a49e-2faffddf2411] Received event network-vif-deleted-a41f3ab6-3d63-4295-a7d4-ab01b18deef6 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 721.318948] env[63345]: INFO nova.compute.manager [req-d15f7d93-4cf5-4004-8a3a-ae36e9074e66 req-03641f37-1f5e-4a5c-bb51-1fccedf8c6fb service nova] [instance: abc81fa5-78a9-48b1-a49e-2faffddf2411] Neutron deleted interface a41f3ab6-3d63-4295-a7d4-ab01b18deef6; detaching it from the instance and deleting it from the info cache [ 721.319135] env[63345]: DEBUG nova.network.neutron [req-d15f7d93-4cf5-4004-8a3a-ae36e9074e66 req-03641f37-1f5e-4a5c-bb51-1fccedf8c6fb service nova] [instance: abc81fa5-78a9-48b1-a49e-2faffddf2411] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 721.434017] env[63345]: DEBUG nova.scheduler.client.report [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 721.547049] env[63345]: DEBUG nova.network.neutron [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] Successfully created port: 989bf403-079f-46b9-ab79-c645cec393aa {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 721.588475] env[63345]: DEBUG nova.network.neutron [-] [instance: abc81fa5-78a9-48b1-a49e-2faffddf2411] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 721.627020] env[63345]: DEBUG oslo_concurrency.lockutils [None req-59c03649-375f-46fa-872a-0959a487545e tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 721.649670] env[63345]: DEBUG oslo_vmware.api [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Task: {'id': task-1016876, 'name': PowerOnVM_Task, 'duration_secs': 0.644804} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 721.649670] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 721.649670] env[63345]: INFO nova.compute.manager [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c] Took 9.33 seconds to spawn the instance on the hypervisor. [ 721.649670] env[63345]: DEBUG nova.compute.manager [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 721.650405] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18a7d312-9059-4ed8-bf97-c47b1ce90886 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.755043] env[63345]: DEBUG oslo_vmware.api [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Task: {'id': task-1016877, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.824461] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3c90fa47-8041-400b-83a4-4e0bb28b76af {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.834948] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de9a0cb6-ec86-43c4-9d2d-e26dc61eeefe {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.853093] env[63345]: DEBUG nova.compute.manager [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] Start spawning the instance on the hypervisor. {{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 721.880124] env[63345]: DEBUG nova.compute.manager [req-d15f7d93-4cf5-4004-8a3a-ae36e9074e66 req-03641f37-1f5e-4a5c-bb51-1fccedf8c6fb service nova] [instance: abc81fa5-78a9-48b1-a49e-2faffddf2411] Detach interface failed, port_id=a41f3ab6-3d63-4295-a7d4-ab01b18deef6, reason: Instance abc81fa5-78a9-48b1-a49e-2faffddf2411 could not be found. 
{{(pid=63345) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 721.894335] env[63345]: DEBUG nova.virt.hardware [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 721.894335] env[63345]: DEBUG nova.virt.hardware [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 721.894335] env[63345]: DEBUG nova.virt.hardware [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 721.894563] env[63345]: DEBUG nova.virt.hardware [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 721.894563] env[63345]: DEBUG nova.virt.hardware [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 721.894563] env[63345]: DEBUG nova.virt.hardware [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 721.894563] env[63345]: DEBUG nova.virt.hardware [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 721.895181] env[63345]: DEBUG nova.virt.hardware [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 
721.895551] env[63345]: DEBUG nova.virt.hardware [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 721.896983] env[63345]: DEBUG nova.virt.hardware [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 721.896983] env[63345]: DEBUG nova.virt.hardware [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 721.897263] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6df28b93-c906-41df-aeeb-75dc3fe2c34c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.907806] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-085fd733-5f5d-4e8d-aad3-3c90b2434ccc {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.941076] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.123s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 721.941632] env[63345]: DEBUG nova.compute.manager [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 3101726f-5b14-417e-bcf8-390ce1f9b467] Start building networks asynchronously for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 721.944526] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.378s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 721.948019] env[63345]: INFO nova.compute.claims [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] [instance: 64fcf837-1d9d-41b1-a2a1-3c16362932cf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 722.091496] env[63345]: INFO nova.compute.manager [-] [instance: abc81fa5-78a9-48b1-a49e-2faffddf2411] Took 1.34 seconds to deallocate network for instance. 
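Annotation: the "Acquiring lock ... by ...", "acquired ... waited 0.000s" and ""released" ... held 2.123s" lines above are emitted by oslo.concurrency's lockutils wrappers. A minimal sketch of the two forms that produce this logging follows; the function name and the use of these particular lock names are placeholders for illustration, not Nova source.

```python
# Minimal sketch (not Nova source): oslo.concurrency lockutils emits the
# acquire/wait/hold DEBUG lines seen above. Function/lock names are placeholders.
from oslo_concurrency import lockutils

@lockutils.synchronized("compute_resources")
def claim_resources(instance_uuid):
    # critical section: runs while holding the in-process "compute_resources" lock
    return instance_uuid

# Equivalent explicit form, as used for the "refresh_cache-..." locks above:
with lockutils.lock("refresh_cache-85fb1ecd-4ca3-401d-a87a-131f0b275506"):
    pass  # build/refresh the instance network info cache here
```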
[ 722.172306] env[63345]: INFO nova.compute.manager [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c] Took 34.65 seconds to build instance. [ 722.257149] env[63345]: DEBUG oslo_vmware.api [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Task: {'id': task-1016877, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.700597} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.257475] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 34e0234c-36c4-4878-979b-46f045bd1785/34e0234c-36c4-4878-979b-46f045bd1785.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 722.257914] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 34e0234c-36c4-4878-979b-46f045bd1785] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 722.258229] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-53607622-b766-4602-b848-4734963e4434 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.265700] env[63345]: DEBUG oslo_vmware.api [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Waiting for the task: (returnval){ [ 722.265700] env[63345]: value = "task-1016878" [ 722.265700] env[63345]: _type = "Task" [ 722.265700] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.275598] env[63345]: DEBUG oslo_vmware.api [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Task: {'id': task-1016878, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.453375] env[63345]: DEBUG nova.compute.utils [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 722.458035] env[63345]: DEBUG nova.compute.manager [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 3101726f-5b14-417e-bcf8-390ce1f9b467] Allocating IP information in the background. 
{{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 722.458035] env[63345]: DEBUG nova.network.neutron [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 3101726f-5b14-417e-bcf8-390ce1f9b467] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 722.530093] env[63345]: DEBUG nova.policy [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '620aa8aab5b7456e8d0feda8a3d9a225', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dc3b4aff33e540d79c796f98c315a05a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 722.599046] env[63345]: DEBUG oslo_concurrency.lockutils [None req-20fee683-8a2f-46a5-b495-3d50cf4c60ce tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 722.675369] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Lock "78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 129.029s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 722.782028] env[63345]: DEBUG oslo_vmware.api [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Task: {'id': task-1016878, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.176492} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.782028] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 34e0234c-36c4-4878-979b-46f045bd1785] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 722.787029] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a82f3f42-eea8-40d4-82b1-8628204133c7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.813959] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 34e0234c-36c4-4878-979b-46f045bd1785] Reconfiguring VM instance instance-0000002d to attach disk [datastore2] 34e0234c-36c4-4878-979b-46f045bd1785/34e0234c-36c4-4878-979b-46f045bd1785.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 722.814179] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a1d45bde-1589-4fe8-aadb-c234b2657f78 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.835924] env[63345]: DEBUG oslo_vmware.api [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Waiting for the task: (returnval){ [ 722.835924] env[63345]: value = "task-1016879" [ 722.835924] env[63345]: _type = "Task" [ 722.835924] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.845645] env[63345]: DEBUG oslo_vmware.api [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Task: {'id': task-1016879, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.964590] env[63345]: DEBUG nova.compute.manager [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 3101726f-5b14-417e-bcf8-390ce1f9b467] Start building block device mappings for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 723.029160] env[63345]: DEBUG nova.network.neutron [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 3101726f-5b14-417e-bcf8-390ce1f9b467] Successfully created port: 2eec7fb7-14bd-4975-ac39-8b00f81ac502 {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 723.139989] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Acquiring lock "02eb493e-d1a1-4461-8e3f-e493e96fe058" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 723.140286] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Lock "02eb493e-d1a1-4461-8e3f-e493e96fe058" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 723.177533] env[63345]: DEBUG nova.compute.manager [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] [instance: 93112cc1-f9a1-4188-9555-bddf483426a1] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 723.353084] env[63345]: DEBUG oslo_vmware.api [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Task: {'id': task-1016879, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.500880] env[63345]: DEBUG nova.compute.manager [req-b53a2e84-f469-4650-b02a-6122bb40b1d4 req-b5983038-5c73-415e-bc2a-1dd8264764e4 service nova] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] Received event network-vif-plugged-989bf403-079f-46b9-ab79-c645cec393aa {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 723.501222] env[63345]: DEBUG oslo_concurrency.lockutils [req-b53a2e84-f469-4650-b02a-6122bb40b1d4 req-b5983038-5c73-415e-bc2a-1dd8264764e4 service nova] Acquiring lock "85fb1ecd-4ca3-401d-a87a-131f0b275506-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 723.501536] env[63345]: DEBUG oslo_concurrency.lockutils [req-b53a2e84-f469-4650-b02a-6122bb40b1d4 req-b5983038-5c73-415e-bc2a-1dd8264764e4 service nova] Lock "85fb1ecd-4ca3-401d-a87a-131f0b275506-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 723.501728] env[63345]: DEBUG oslo_concurrency.lockutils [req-b53a2e84-f469-4650-b02a-6122bb40b1d4 req-b5983038-5c73-415e-bc2a-1dd8264764e4 service nova] Lock "85fb1ecd-4ca3-401d-a87a-131f0b275506-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 723.502196] env[63345]: DEBUG nova.compute.manager [req-b53a2e84-f469-4650-b02a-6122bb40b1d4 req-b5983038-5c73-415e-bc2a-1dd8264764e4 service nova] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] No waiting events found dispatching network-vif-plugged-989bf403-079f-46b9-ab79-c645cec393aa {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 723.502344] env[63345]: WARNING nova.compute.manager [req-b53a2e84-f469-4650-b02a-6122bb40b1d4 req-b5983038-5c73-415e-bc2a-1dd8264764e4 service nova] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] Received unexpected event network-vif-plugged-989bf403-079f-46b9-ab79-c645cec393aa for instance with vm_state building and task_state spawning. 
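Annotation: the repeating "Invoking VirtualMachine.PowerOnVM_Task ...", "Waiting for the task: (returnval){...}", "progress is N% ... completed successfully" sequence above is the oslo.vmware invoke/poll pattern. A rough sketch of that pattern, with host, credentials, and the vm_ref value as placeholders rather than values from this deployment:

```python
# Rough sketch (not the Nova driver) of the oslo.vmware task pattern logged above.
# Endpoint, credentials and vm_ref are placeholders.
from oslo_vmware import api

session = api.VMwareAPISession(
    "vcenter.example.test", "user@vsphere.local", "secret",
    api_retry_count=10, task_poll_interval=0.5)

vm_ref = None  # placeholder: a VirtualMachine managed-object reference
task = session.invoke_api(session.vim, "PowerOnVM_Task", vm_ref)
session.wait_for_task(task)  # polls the task and logs progress until it completes
```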
[ 723.522870] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f479c05-21b0-448e-b44c-a8306d53d15d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.533895] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9f6b1d0-36ff-4386-a957-92dbfeb6d26c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.570441] env[63345]: DEBUG nova.network.neutron [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] Successfully updated port: 989bf403-079f-46b9-ab79-c645cec393aa {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 723.572945] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14e9663e-8a01-45e6-9308-2138b256a847 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.585127] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31945df4-8483-4fbd-87e5-81ea0eeb9c16 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.599505] env[63345]: DEBUG nova.compute.provider_tree [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 723.703948] env[63345]: DEBUG oslo_concurrency.lockutils [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 723.850308] env[63345]: DEBUG oslo_vmware.api [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Task: {'id': task-1016879, 'name': ReconfigVM_Task, 'duration_secs': 0.789475} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.850308] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 34e0234c-36c4-4878-979b-46f045bd1785] Reconfigured VM instance instance-0000002d to attach disk [datastore2] 34e0234c-36c4-4878-979b-46f045bd1785/34e0234c-36c4-4878-979b-46f045bd1785.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 723.850308] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6d746c52-c0bf-433b-813e-9d1093ff434f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.857018] env[63345]: DEBUG oslo_vmware.api [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Waiting for the task: (returnval){ [ 723.857018] env[63345]: value = "task-1016880" [ 723.857018] env[63345]: _type = "Task" [ 723.857018] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.865041] env[63345]: DEBUG oslo_vmware.api [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Task: {'id': task-1016880, 'name': Rename_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.979028] env[63345]: DEBUG nova.compute.manager [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 3101726f-5b14-417e-bcf8-390ce1f9b467] Start spawning the instance on the hypervisor. 
{{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 724.004100] env[63345]: DEBUG nova.virt.hardware [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 724.004673] env[63345]: DEBUG nova.virt.hardware [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 724.004991] env[63345]: DEBUG nova.virt.hardware [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 724.005369] env[63345]: DEBUG nova.virt.hardware [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 724.005689] env[63345]: DEBUG nova.virt.hardware [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 724.006000] env[63345]: DEBUG nova.virt.hardware [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 724.006516] env[63345]: DEBUG nova.virt.hardware [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 724.006851] env[63345]: DEBUG nova.virt.hardware [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 724.007310] env[63345]: DEBUG nova.virt.hardware [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 724.007659] env[63345]: DEBUG nova.virt.hardware [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 724.010119] env[63345]: DEBUG nova.virt.hardware [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 724.010119] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-933c8bdb-33ad-4b04-9869-2433bbfca07b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.018735] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-355c84a0-9d18-4f34-8e18-d46e51057b1f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.079858] env[63345]: DEBUG oslo_concurrency.lockutils [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquiring lock "refresh_cache-85fb1ecd-4ca3-401d-a87a-131f0b275506" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 724.080041] env[63345]: DEBUG oslo_concurrency.lockutils [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquired lock "refresh_cache-85fb1ecd-4ca3-401d-a87a-131f0b275506" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 724.080273] env[63345]: DEBUG nova.network.neutron [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 724.102851] env[63345]: DEBUG nova.scheduler.client.report [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:955}} [ 724.365384] env[63345]: DEBUG oslo_vmware.api [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Task: {'id': task-1016880, 'name': Rename_Task, 'duration_secs': 0.14683} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.365801] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 34e0234c-36c4-4878-979b-46f045bd1785] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 724.366117] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-968352c2-fcea-4ea6-bdb8-df2167f2af5f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.376280] env[63345]: DEBUG oslo_vmware.api [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Waiting for the task: (returnval){ [ 724.376280] env[63345]: value = "task-1016881" [ 724.376280] env[63345]: _type = "Task" [ 724.376280] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.387537] env[63345]: DEBUG oslo_vmware.api [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Task: {'id': task-1016881, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.608740] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.664s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 724.609292] env[63345]: DEBUG nova.compute.manager [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] [instance: 64fcf837-1d9d-41b1-a2a1-3c16362932cf] Start building networks asynchronously for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 724.611934] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d0b1cd44-5d00-4371-827d-17b8856bf22c tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.378s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 724.616031] env[63345]: INFO nova.compute.claims [None req-d0b1cd44-5d00-4371-827d-17b8856bf22c tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 724.635087] env[63345]: DEBUG nova.network.neutron [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 724.755456] env[63345]: DEBUG nova.compute.manager [req-952bb2e1-5bbc-453a-ae83-28676b4d97ac req-61c350ea-eb9a-461f-8030-96e012816878 service nova] [instance: 3101726f-5b14-417e-bcf8-390ce1f9b467] Received event network-vif-plugged-2eec7fb7-14bd-4975-ac39-8b00f81ac502 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 724.755456] env[63345]: DEBUG oslo_concurrency.lockutils [req-952bb2e1-5bbc-453a-ae83-28676b4d97ac req-61c350ea-eb9a-461f-8030-96e012816878 service nova] Acquiring lock "3101726f-5b14-417e-bcf8-390ce1f9b467-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 724.755456] env[63345]: DEBUG oslo_concurrency.lockutils [req-952bb2e1-5bbc-453a-ae83-28676b4d97ac req-61c350ea-eb9a-461f-8030-96e012816878 service nova] Lock "3101726f-5b14-417e-bcf8-390ce1f9b467-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 724.755456] env[63345]: DEBUG oslo_concurrency.lockutils [req-952bb2e1-5bbc-453a-ae83-28676b4d97ac req-61c350ea-eb9a-461f-8030-96e012816878 service nova] Lock "3101726f-5b14-417e-bcf8-390ce1f9b467-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 724.755456] env[63345]: DEBUG nova.compute.manager [req-952bb2e1-5bbc-453a-ae83-28676b4d97ac req-61c350ea-eb9a-461f-8030-96e012816878 service nova] [instance: 3101726f-5b14-417e-bcf8-390ce1f9b467] No waiting events found dispatching network-vif-plugged-2eec7fb7-14bd-4975-ac39-8b00f81ac502 {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 724.755833] env[63345]: WARNING nova.compute.manager [req-952bb2e1-5bbc-453a-ae83-28676b4d97ac req-61c350ea-eb9a-461f-8030-96e012816878 service nova] [instance: 3101726f-5b14-417e-bcf8-390ce1f9b467] Received unexpected event network-vif-plugged-2eec7fb7-14bd-4975-ac39-8b00f81ac502 for instance with vm_state building and task_state spawning. 
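Annotation: the usable capacity implied by the inventory dict logged above follows the standard Placement formula (total - reserved) * allocation_ratio. A quick check of those numbers, not Nova code:

```python
# Quick check (not Nova code): capacity = (total - reserved) * allocation_ratio,
# using the inventory values logged above for provider fc35ddde-....
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(rc, capacity)  # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
```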
[ 724.822914] env[63345]: DEBUG nova.network.neutron [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 3101726f-5b14-417e-bcf8-390ce1f9b467] Successfully updated port: 2eec7fb7-14bd-4975-ac39-8b00f81ac502 {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 724.881559] env[63345]: DEBUG nova.network.neutron [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] Updating instance_info_cache with network_info: [{"id": "989bf403-079f-46b9-ab79-c645cec393aa", "address": "fa:16:3e:93:20:cd", "network": {"id": "b360ab0d-3deb-4632-a8d5-c1639db9e9e2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2015660260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33c28bfca4da460e8ca96dc7519204c8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f35e69ef-c2c8-4b8c-9887-33e97b242c0a", "external-id": "nsx-vlan-transportzone-969", "segmentation_id": 969, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap989bf403-07", "ovs_interfaceid": "989bf403-079f-46b9-ab79-c645cec393aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 724.886624] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Acquiring lock "00c58889-75f7-4a4b-a5a3-a45723c1f495" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 724.886917] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Lock "00c58889-75f7-4a4b-a5a3-a45723c1f495" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 724.894448] env[63345]: DEBUG oslo_vmware.api [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Task: {'id': task-1016881, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.118194] env[63345]: DEBUG nova.compute.utils [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 725.121827] env[63345]: DEBUG nova.compute.manager [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] [instance: 64fcf837-1d9d-41b1-a2a1-3c16362932cf] Allocating IP information in the background. {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 725.121993] env[63345]: DEBUG nova.network.neutron [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] [instance: 64fcf837-1d9d-41b1-a2a1-3c16362932cf] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 725.161563] env[63345]: DEBUG nova.policy [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2933a580908d4f1796010a18b34684a1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9f769fb06b3c4d32a9aa2b99943344ab', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 725.328313] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Acquiring lock "refresh_cache-3101726f-5b14-417e-bcf8-390ce1f9b467" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 725.328468] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Acquired lock "refresh_cache-3101726f-5b14-417e-bcf8-390ce1f9b467" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 725.328641] env[63345]: DEBUG nova.network.neutron [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 3101726f-5b14-417e-bcf8-390ce1f9b467] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 725.391018] env[63345]: DEBUG oslo_concurrency.lockutils [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Releasing lock "refresh_cache-85fb1ecd-4ca3-401d-a87a-131f0b275506" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 725.391018] env[63345]: DEBUG nova.compute.manager [None req-88b6e3ef-b83f-4933-977c-3790535be89e 
tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] Instance network_info: |[{"id": "989bf403-079f-46b9-ab79-c645cec393aa", "address": "fa:16:3e:93:20:cd", "network": {"id": "b360ab0d-3deb-4632-a8d5-c1639db9e9e2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2015660260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33c28bfca4da460e8ca96dc7519204c8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f35e69ef-c2c8-4b8c-9887-33e97b242c0a", "external-id": "nsx-vlan-transportzone-969", "segmentation_id": 969, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap989bf403-07", "ovs_interfaceid": "989bf403-079f-46b9-ab79-c645cec393aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 725.391393] env[63345]: DEBUG oslo_vmware.api [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Task: {'id': task-1016881, 'name': PowerOnVM_Task, 'duration_secs': 0.560114} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.391393] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:93:20:cd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f35e69ef-c2c8-4b8c-9887-33e97b242c0a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '989bf403-079f-46b9-ab79-c645cec393aa', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 725.403434] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Creating folder: Project (33c28bfca4da460e8ca96dc7519204c8). Parent ref: group-v225918. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 725.404156] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 34e0234c-36c4-4878-979b-46f045bd1785] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 725.404345] env[63345]: INFO nova.compute.manager [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 34e0234c-36c4-4878-979b-46f045bd1785] Took 10.43 seconds to spawn the instance on the hypervisor. 
[ 725.405211] env[63345]: DEBUG nova.compute.manager [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 34e0234c-36c4-4878-979b-46f045bd1785] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 725.405211] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-17ebc56e-59cc-486a-a699-7bb5ee9158d0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.409143] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fcfe7f5-51dd-4333-b030-11ae381bb636 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.422016] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Created folder: Project (33c28bfca4da460e8ca96dc7519204c8) in parent group-v225918. [ 725.422016] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Creating folder: Instances. Parent ref: group-v225990. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 725.422016] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8174b965-f5e2-4c95-993a-8eb2eac1c157 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.430576] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Created folder: Instances in parent group-v225990. [ 725.430576] env[63345]: DEBUG oslo.service.loopingcall [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 725.430576] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 725.430576] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-48bdeef2-df5c-44f5-95e3-2582802371ff {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.450427] env[63345]: DEBUG nova.network.neutron [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] [instance: 64fcf837-1d9d-41b1-a2a1-3c16362932cf] Successfully created port: 6a41176d-7c10-4226-8332-eff6eea91574 {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 725.458208] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 725.458208] env[63345]: value = "task-1016884" [ 725.458208] env[63345]: _type = "Task" [ 725.458208] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.466277] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016884, 'name': CreateVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.574150] env[63345]: DEBUG nova.compute.manager [req-a7cd792f-a130-4530-b6b3-618eba01a520 req-1cdc91c2-d05d-4d45-ac02-dc8123f6eab1 service nova] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] Received event network-changed-989bf403-079f-46b9-ab79-c645cec393aa {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 725.574150] env[63345]: DEBUG nova.compute.manager [req-a7cd792f-a130-4530-b6b3-618eba01a520 req-1cdc91c2-d05d-4d45-ac02-dc8123f6eab1 service nova] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] Refreshing instance network info cache due to event network-changed-989bf403-079f-46b9-ab79-c645cec393aa. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 725.574150] env[63345]: DEBUG oslo_concurrency.lockutils [req-a7cd792f-a130-4530-b6b3-618eba01a520 req-1cdc91c2-d05d-4d45-ac02-dc8123f6eab1 service nova] Acquiring lock "refresh_cache-85fb1ecd-4ca3-401d-a87a-131f0b275506" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 725.574286] env[63345]: DEBUG oslo_concurrency.lockutils [req-a7cd792f-a130-4530-b6b3-618eba01a520 req-1cdc91c2-d05d-4d45-ac02-dc8123f6eab1 service nova] Acquired lock "refresh_cache-85fb1ecd-4ca3-401d-a87a-131f0b275506" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 725.574508] env[63345]: DEBUG nova.network.neutron [req-a7cd792f-a130-4530-b6b3-618eba01a520 req-1cdc91c2-d05d-4d45-ac02-dc8123f6eab1 service nova] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] Refreshing network info cache for port 989bf403-079f-46b9-ab79-c645cec393aa {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 725.630207] env[63345]: DEBUG nova.compute.manager [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] [instance: 64fcf837-1d9d-41b1-a2a1-3c16362932cf] Start building block device mappings for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 725.872358] env[63345]: DEBUG nova.network.neutron [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 3101726f-5b14-417e-bcf8-390ce1f9b467] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 725.930492] env[63345]: INFO nova.compute.manager [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 34e0234c-36c4-4878-979b-46f045bd1785] Took 36.95 seconds to build instance. [ 725.975278] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016884, 'name': CreateVM_Task, 'duration_secs': 0.351612} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.977755] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 725.978604] env[63345]: DEBUG oslo_concurrency.lockutils [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 725.978776] env[63345]: DEBUG oslo_concurrency.lockutils [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 725.979110] env[63345]: DEBUG oslo_concurrency.lockutils [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 725.979364] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d04ffabb-bf00-45f9-ac92-5e64723213e3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.987669] env[63345]: DEBUG oslo_vmware.api [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Waiting for the task: (returnval){ [ 725.987669] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]524a6a3c-de12-782f-717b-4983ce2b301f" [ 725.987669] env[63345]: _type = "Task" [ 725.987669] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.993714] env[63345]: DEBUG oslo_vmware.api [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]524a6a3c-de12-782f-717b-4983ce2b301f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.071136] env[63345]: DEBUG nova.network.neutron [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 3101726f-5b14-417e-bcf8-390ce1f9b467] Updating instance_info_cache with network_info: [{"id": "2eec7fb7-14bd-4975-ac39-8b00f81ac502", "address": "fa:16:3e:74:69:6b", "network": {"id": "c9f406eb-96bc-4c63-8f76-474a8fcc4f7d", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1744519845-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc3b4aff33e540d79c796f98c315a05a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2eec7fb7-14", "ovs_interfaceid": "2eec7fb7-14bd-4975-ac39-8b00f81ac502", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 726.182523] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8ff4c67-2cc5-45bc-b2c0-7e4be7997f09 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.190284] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22a287d4-484a-4d68-a03f-d3b7065b38aa {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.224899] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39ad5b7a-17ac-4f69-ae20-b770624cefc0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.233669] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80e13d8c-66eb-497c-8c85-46296fdf0370 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.247819] env[63345]: DEBUG nova.compute.provider_tree [None req-d0b1cd44-5d00-4371-827d-17b8856bf22c tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 726.338688] env[63345]: DEBUG nova.network.neutron [req-a7cd792f-a130-4530-b6b3-618eba01a520 req-1cdc91c2-d05d-4d45-ac02-dc8123f6eab1 service nova] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] Updated VIF entry in instance network info cache for port 989bf403-079f-46b9-ab79-c645cec393aa. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 726.339072] env[63345]: DEBUG nova.network.neutron [req-a7cd792f-a130-4530-b6b3-618eba01a520 req-1cdc91c2-d05d-4d45-ac02-dc8123f6eab1 service nova] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] Updating instance_info_cache with network_info: [{"id": "989bf403-079f-46b9-ab79-c645cec393aa", "address": "fa:16:3e:93:20:cd", "network": {"id": "b360ab0d-3deb-4632-a8d5-c1639db9e9e2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2015660260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33c28bfca4da460e8ca96dc7519204c8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f35e69ef-c2c8-4b8c-9887-33e97b242c0a", "external-id": "nsx-vlan-transportzone-969", "segmentation_id": 969, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap989bf403-07", "ovs_interfaceid": "989bf403-079f-46b9-ab79-c645cec393aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 726.434856] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a50e6b2f-3edd-409a-ac77-8a0376f59830 tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Lock "34e0234c-36c4-4878-979b-46f045bd1785" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 132.712s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 726.494860] env[63345]: DEBUG oslo_vmware.api [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]524a6a3c-de12-782f-717b-4983ce2b301f, 'name': SearchDatastore_Task, 'duration_secs': 0.009566} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.495169] env[63345]: DEBUG oslo_concurrency.lockutils [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 726.495469] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 726.495711] env[63345]: DEBUG oslo_concurrency.lockutils [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 726.495874] env[63345]: DEBUG oslo_concurrency.lockutils [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 726.496067] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 726.496321] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c3c86f5c-8216-4d45-aa66-8931c4350c61 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.504133] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 726.504625] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 726.504872] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c09859d5-fafb-4546-84d0-d5d1ecfc6d14 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.509790] env[63345]: DEBUG oslo_vmware.api [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Waiting for the task: (returnval){ [ 726.509790] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]5259bea6-fbec-bfea-33a3-0292d8af81e0" [ 726.509790] env[63345]: _type = "Task" [ 726.509790] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.517297] env[63345]: DEBUG oslo_vmware.api [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5259bea6-fbec-bfea-33a3-0292d8af81e0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.576077] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Releasing lock "refresh_cache-3101726f-5b14-417e-bcf8-390ce1f9b467" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 726.576433] env[63345]: DEBUG nova.compute.manager [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 3101726f-5b14-417e-bcf8-390ce1f9b467] Instance network_info: |[{"id": "2eec7fb7-14bd-4975-ac39-8b00f81ac502", "address": "fa:16:3e:74:69:6b", "network": {"id": "c9f406eb-96bc-4c63-8f76-474a8fcc4f7d", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1744519845-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc3b4aff33e540d79c796f98c315a05a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2eec7fb7-14", "ovs_interfaceid": "2eec7fb7-14bd-4975-ac39-8b00f81ac502", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 726.576854] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 
3101726f-5b14-417e-bcf8-390ce1f9b467] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:74:69:6b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4fb94adb-cc41-4c16-9830-a3205dbd2bf5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2eec7fb7-14bd-4975-ac39-8b00f81ac502', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 726.584617] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Creating folder: Project (dc3b4aff33e540d79c796f98c315a05a). Parent ref: group-v225918. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 726.585240] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d74ab73c-c68f-4bfb-b387-b62d7f9d213f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.598029] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Created folder: Project (dc3b4aff33e540d79c796f98c315a05a) in parent group-v225918. [ 726.598029] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Creating folder: Instances. Parent ref: group-v225993. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 726.598151] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e6736787-b6c3-401c-abb6-94f52b907b45 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.608427] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Created folder: Instances in parent group-v225993. [ 726.608657] env[63345]: DEBUG oslo.service.loopingcall [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 726.608849] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3101726f-5b14-417e-bcf8-390ce1f9b467] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 726.609063] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7a849637-e7c9-442f-9fc0-ea9602b0136c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.628615] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 726.628615] env[63345]: value = "task-1016887" [ 726.628615] env[63345]: _type = "Task" [ 726.628615] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.636436] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016887, 'name': CreateVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.638966] env[63345]: DEBUG nova.compute.manager [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] [instance: 64fcf837-1d9d-41b1-a2a1-3c16362932cf] Start spawning the instance on the hypervisor. {{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 726.666837] env[63345]: DEBUG nova.virt.hardware [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 726.667098] env[63345]: DEBUG nova.virt.hardware [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 726.667262] env[63345]: DEBUG nova.virt.hardware [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 726.667444] env[63345]: DEBUG nova.virt.hardware [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 726.667592] env[63345]: DEBUG nova.virt.hardware [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 726.667739] env[63345]: DEBUG nova.virt.hardware [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 726.667945] env[63345]: DEBUG nova.virt.hardware [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 
tempest-ServerMetadataTestJSON-1127927341-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 726.668120] env[63345]: DEBUG nova.virt.hardware [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 726.668312] env[63345]: DEBUG nova.virt.hardware [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 726.668513] env[63345]: DEBUG nova.virt.hardware [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 726.668694] env[63345]: DEBUG nova.virt.hardware [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 726.669555] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc5a6777-a63b-4ec9-8160-189062e1130f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.677022] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdb1b2ee-bf4b-4ecf-b8e5-76e056c7e2bf {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.751315] env[63345]: DEBUG nova.scheduler.client.report [None req-d0b1cd44-5d00-4371-827d-17b8856bf22c tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 726.829495] env[63345]: DEBUG nova.compute.manager [req-3f027fa4-59cf-4ed7-ac14-3100e71db818 req-c885119e-2fcd-4d5d-9ec7-b8fa6b7fe774 service nova] [instance: 3101726f-5b14-417e-bcf8-390ce1f9b467] Received event network-changed-2eec7fb7-14bd-4975-ac39-8b00f81ac502 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 726.829730] env[63345]: DEBUG nova.compute.manager [req-3f027fa4-59cf-4ed7-ac14-3100e71db818 req-c885119e-2fcd-4d5d-9ec7-b8fa6b7fe774 service nova] [instance: 3101726f-5b14-417e-bcf8-390ce1f9b467] Refreshing instance network info cache due to event 
network-changed-2eec7fb7-14bd-4975-ac39-8b00f81ac502. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 726.829980] env[63345]: DEBUG oslo_concurrency.lockutils [req-3f027fa4-59cf-4ed7-ac14-3100e71db818 req-c885119e-2fcd-4d5d-9ec7-b8fa6b7fe774 service nova] Acquiring lock "refresh_cache-3101726f-5b14-417e-bcf8-390ce1f9b467" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 726.830164] env[63345]: DEBUG oslo_concurrency.lockutils [req-3f027fa4-59cf-4ed7-ac14-3100e71db818 req-c885119e-2fcd-4d5d-9ec7-b8fa6b7fe774 service nova] Acquired lock "refresh_cache-3101726f-5b14-417e-bcf8-390ce1f9b467" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 726.830367] env[63345]: DEBUG nova.network.neutron [req-3f027fa4-59cf-4ed7-ac14-3100e71db818 req-c885119e-2fcd-4d5d-9ec7-b8fa6b7fe774 service nova] [instance: 3101726f-5b14-417e-bcf8-390ce1f9b467] Refreshing network info cache for port 2eec7fb7-14bd-4975-ac39-8b00f81ac502 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 726.841911] env[63345]: DEBUG oslo_concurrency.lockutils [req-a7cd792f-a130-4530-b6b3-618eba01a520 req-1cdc91c2-d05d-4d45-ac02-dc8123f6eab1 service nova] Releasing lock "refresh_cache-85fb1ecd-4ca3-401d-a87a-131f0b275506" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 726.937730] env[63345]: DEBUG nova.compute.manager [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: e3d52cbd-e768-4425-b83e-180a6e58fd00] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 727.021926] env[63345]: DEBUG oslo_vmware.api [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5259bea6-fbec-bfea-33a3-0292d8af81e0, 'name': SearchDatastore_Task, 'duration_secs': 0.015554} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.022837] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bfef7678-638f-4d32-9649-aa3f1abf32a8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.028149] env[63345]: DEBUG oslo_vmware.api [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Waiting for the task: (returnval){ [ 727.028149] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52336eae-fa83-7f99-5dae-f67489cac28b" [ 727.028149] env[63345]: _type = "Task" [ 727.028149] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.036575] env[63345]: DEBUG oslo_vmware.api [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52336eae-fa83-7f99-5dae-f67489cac28b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.138930] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016887, 'name': CreateVM_Task, 'duration_secs': 0.346923} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.139457] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3101726f-5b14-417e-bcf8-390ce1f9b467] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 727.140152] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 727.140366] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 727.140656] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 727.140900] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ae7bb7b6-4826-48ae-9219-7b5072eda9fb {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.145621] env[63345]: DEBUG oslo_vmware.api [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Waiting for the task: (returnval){ [ 727.145621] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52bd4ad3-bb39-dcd9-b864-70984779cbbb" [ 727.145621] env[63345]: _type = "Task" [ 727.145621] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.152912] env[63345]: DEBUG oslo_vmware.api [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52bd4ad3-bb39-dcd9-b864-70984779cbbb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.261148] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d0b1cd44-5d00-4371-827d-17b8856bf22c tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.649s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 727.261548] env[63345]: DEBUG nova.compute.manager [None req-d0b1cd44-5d00-4371-827d-17b8856bf22c tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Start building networks asynchronously for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 727.265869] env[63345]: DEBUG oslo_concurrency.lockutils [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.314s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 727.269019] env[63345]: INFO nova.compute.claims [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: 4a59b565-571f-48ef-97bd-bed9853e2d8e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 727.393170] env[63345]: DEBUG nova.network.neutron [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] [instance: 64fcf837-1d9d-41b1-a2a1-3c16362932cf] Successfully updated port: 6a41176d-7c10-4226-8332-eff6eea91574 {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 727.459299] env[63345]: DEBUG oslo_concurrency.lockutils [None req-46d06c0e-8898-48ae-8f40-b29c17280d4b tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Acquiring lock "78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 727.459966] env[63345]: DEBUG oslo_concurrency.lockutils [None req-46d06c0e-8898-48ae-8f40-b29c17280d4b tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Lock "78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 727.459966] env[63345]: DEBUG oslo_concurrency.lockutils [None req-46d06c0e-8898-48ae-8f40-b29c17280d4b tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Acquiring lock "78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 727.460144] env[63345]: DEBUG oslo_concurrency.lockutils [None req-46d06c0e-8898-48ae-8f40-b29c17280d4b 
tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Lock "78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 727.460326] env[63345]: DEBUG oslo_concurrency.lockutils [None req-46d06c0e-8898-48ae-8f40-b29c17280d4b tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Lock "78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 727.462611] env[63345]: INFO nova.compute.manager [None req-46d06c0e-8898-48ae-8f40-b29c17280d4b tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c] Terminating instance [ 727.466020] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 727.539670] env[63345]: DEBUG oslo_vmware.api [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52336eae-fa83-7f99-5dae-f67489cac28b, 'name': SearchDatastore_Task, 'duration_secs': 0.008575} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.539910] env[63345]: DEBUG oslo_concurrency.lockutils [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 727.540200] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 85fb1ecd-4ca3-401d-a87a-131f0b275506/85fb1ecd-4ca3-401d-a87a-131f0b275506.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 727.540868] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7f347644-40cc-4641-bed7-86fc1a922d11 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.547269] env[63345]: DEBUG oslo_concurrency.lockutils [None req-77605977-b262-459e-a8bb-b5d33eecfcbe tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Acquiring lock "34e0234c-36c4-4878-979b-46f045bd1785" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 727.547526] env[63345]: DEBUG oslo_concurrency.lockutils [None req-77605977-b262-459e-a8bb-b5d33eecfcbe tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Lock "34e0234c-36c4-4878-979b-46f045bd1785" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 727.547786] env[63345]: DEBUG oslo_concurrency.lockutils [None req-77605977-b262-459e-a8bb-b5d33eecfcbe tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Acquiring lock "34e0234c-36c4-4878-979b-46f045bd1785-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 727.548096] env[63345]: DEBUG oslo_concurrency.lockutils [None req-77605977-b262-459e-a8bb-b5d33eecfcbe tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Lock "34e0234c-36c4-4878-979b-46f045bd1785-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 727.548207] env[63345]: DEBUG oslo_concurrency.lockutils [None req-77605977-b262-459e-a8bb-b5d33eecfcbe tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Lock "34e0234c-36c4-4878-979b-46f045bd1785-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s 
{{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 727.551260] env[63345]: DEBUG oslo_vmware.api [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Waiting for the task: (returnval){ [ 727.551260] env[63345]: value = "task-1016888" [ 727.551260] env[63345]: _type = "Task" [ 727.551260] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.554932] env[63345]: INFO nova.compute.manager [None req-77605977-b262-459e-a8bb-b5d33eecfcbe tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 34e0234c-36c4-4878-979b-46f045bd1785] Terminating instance [ 727.563372] env[63345]: DEBUG oslo_vmware.api [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1016888, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.575966] env[63345]: DEBUG nova.network.neutron [req-3f027fa4-59cf-4ed7-ac14-3100e71db818 req-c885119e-2fcd-4d5d-9ec7-b8fa6b7fe774 service nova] [instance: 3101726f-5b14-417e-bcf8-390ce1f9b467] Updated VIF entry in instance network info cache for port 2eec7fb7-14bd-4975-ac39-8b00f81ac502. {{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 727.576364] env[63345]: DEBUG nova.network.neutron [req-3f027fa4-59cf-4ed7-ac14-3100e71db818 req-c885119e-2fcd-4d5d-9ec7-b8fa6b7fe774 service nova] [instance: 3101726f-5b14-417e-bcf8-390ce1f9b467] Updating instance_info_cache with network_info: [{"id": "2eec7fb7-14bd-4975-ac39-8b00f81ac502", "address": "fa:16:3e:74:69:6b", "network": {"id": "c9f406eb-96bc-4c63-8f76-474a8fcc4f7d", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1744519845-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc3b4aff33e540d79c796f98c315a05a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2eec7fb7-14", "ovs_interfaceid": "2eec7fb7-14bd-4975-ac39-8b00f81ac502", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 727.646352] env[63345]: DEBUG nova.compute.manager [req-2b62f7df-a02c-4047-b297-51e7dde69fb5 req-7d90650b-7958-4b29-bb24-fb999ba3f889 service nova] [instance: 64fcf837-1d9d-41b1-a2a1-3c16362932cf] Received event network-vif-plugged-6a41176d-7c10-4226-8332-eff6eea91574 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 727.646650] env[63345]: DEBUG oslo_concurrency.lockutils 
[req-2b62f7df-a02c-4047-b297-51e7dde69fb5 req-7d90650b-7958-4b29-bb24-fb999ba3f889 service nova] Acquiring lock "64fcf837-1d9d-41b1-a2a1-3c16362932cf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 727.646932] env[63345]: DEBUG oslo_concurrency.lockutils [req-2b62f7df-a02c-4047-b297-51e7dde69fb5 req-7d90650b-7958-4b29-bb24-fb999ba3f889 service nova] Lock "64fcf837-1d9d-41b1-a2a1-3c16362932cf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 727.647290] env[63345]: DEBUG oslo_concurrency.lockutils [req-2b62f7df-a02c-4047-b297-51e7dde69fb5 req-7d90650b-7958-4b29-bb24-fb999ba3f889 service nova] Lock "64fcf837-1d9d-41b1-a2a1-3c16362932cf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 727.647538] env[63345]: DEBUG nova.compute.manager [req-2b62f7df-a02c-4047-b297-51e7dde69fb5 req-7d90650b-7958-4b29-bb24-fb999ba3f889 service nova] [instance: 64fcf837-1d9d-41b1-a2a1-3c16362932cf] No waiting events found dispatching network-vif-plugged-6a41176d-7c10-4226-8332-eff6eea91574 {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 727.647750] env[63345]: WARNING nova.compute.manager [req-2b62f7df-a02c-4047-b297-51e7dde69fb5 req-7d90650b-7958-4b29-bb24-fb999ba3f889 service nova] [instance: 64fcf837-1d9d-41b1-a2a1-3c16362932cf] Received unexpected event network-vif-plugged-6a41176d-7c10-4226-8332-eff6eea91574 for instance with vm_state building and task_state spawning. [ 727.647964] env[63345]: DEBUG nova.compute.manager [req-2b62f7df-a02c-4047-b297-51e7dde69fb5 req-7d90650b-7958-4b29-bb24-fb999ba3f889 service nova] [instance: 64fcf837-1d9d-41b1-a2a1-3c16362932cf] Received event network-changed-6a41176d-7c10-4226-8332-eff6eea91574 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 727.648660] env[63345]: DEBUG nova.compute.manager [req-2b62f7df-a02c-4047-b297-51e7dde69fb5 req-7d90650b-7958-4b29-bb24-fb999ba3f889 service nova] [instance: 64fcf837-1d9d-41b1-a2a1-3c16362932cf] Refreshing instance network info cache due to event network-changed-6a41176d-7c10-4226-8332-eff6eea91574. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 727.649205] env[63345]: DEBUG oslo_concurrency.lockutils [req-2b62f7df-a02c-4047-b297-51e7dde69fb5 req-7d90650b-7958-4b29-bb24-fb999ba3f889 service nova] Acquiring lock "refresh_cache-64fcf837-1d9d-41b1-a2a1-3c16362932cf" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 727.649205] env[63345]: DEBUG oslo_concurrency.lockutils [req-2b62f7df-a02c-4047-b297-51e7dde69fb5 req-7d90650b-7958-4b29-bb24-fb999ba3f889 service nova] Acquired lock "refresh_cache-64fcf837-1d9d-41b1-a2a1-3c16362932cf" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 727.649364] env[63345]: DEBUG nova.network.neutron [req-2b62f7df-a02c-4047-b297-51e7dde69fb5 req-7d90650b-7958-4b29-bb24-fb999ba3f889 service nova] [instance: 64fcf837-1d9d-41b1-a2a1-3c16362932cf] Refreshing network info cache for port 6a41176d-7c10-4226-8332-eff6eea91574 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 727.666862] env[63345]: DEBUG oslo_vmware.api [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52bd4ad3-bb39-dcd9-b864-70984779cbbb, 'name': SearchDatastore_Task, 'duration_secs': 0.00859} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.667225] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 727.667526] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 3101726f-5b14-417e-bcf8-390ce1f9b467] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 727.667827] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 727.668035] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 727.668302] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 
tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 727.668620] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-94fb188c-4132-4a77-a4a7-1342cc53597c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.677398] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 727.677640] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 727.678509] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3e72501e-3efa-4b7a-b155-1ed1625ba5fa {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.685155] env[63345]: DEBUG oslo_vmware.api [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Waiting for the task: (returnval){ [ 727.685155] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52f30b2e-cd8e-a832-6f4e-5aabeb329189" [ 727.685155] env[63345]: _type = "Task" [ 727.685155] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.694574] env[63345]: DEBUG oslo_vmware.api [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52f30b2e-cd8e-a832-6f4e-5aabeb329189, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.771318] env[63345]: DEBUG nova.compute.utils [None req-d0b1cd44-5d00-4371-827d-17b8856bf22c tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 727.776047] env[63345]: DEBUG nova.compute.manager [None req-d0b1cd44-5d00-4371-827d-17b8856bf22c tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Allocating IP information in the background. 
{{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 727.776239] env[63345]: DEBUG nova.network.neutron [None req-d0b1cd44-5d00-4371-827d-17b8856bf22c tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 727.819856] env[63345]: DEBUG nova.policy [None req-d0b1cd44-5d00-4371-827d-17b8856bf22c tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '19eb765159554cc6aca9821ca64a5cac', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4e6bcd39225b4bc5b1ac79111b46dd9e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 727.896547] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Acquiring lock "refresh_cache-64fcf837-1d9d-41b1-a2a1-3c16362932cf" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 727.968687] env[63345]: DEBUG nova.compute.manager [None req-46d06c0e-8898-48ae-8f40-b29c17280d4b tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c] Start destroying the instance on the hypervisor. {{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 727.968941] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-46d06c0e-8898-48ae-8f40-b29c17280d4b tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 727.969899] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99eeb7cb-6833-4487-adfd-484c815ee1e0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.980671] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-46d06c0e-8898-48ae-8f40-b29c17280d4b tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 727.981326] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-273dfdf4-0203-4483-ad78-f971a8841d43 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.992689] env[63345]: DEBUG oslo_vmware.api [None req-46d06c0e-8898-48ae-8f40-b29c17280d4b tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Waiting for the task: (returnval){ [ 727.992689] env[63345]: value = "task-1016889" [ 727.992689] env[63345]: _type = "Task" [ 727.992689] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.004570] env[63345]: DEBUG oslo_vmware.api [None req-46d06c0e-8898-48ae-8f40-b29c17280d4b tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Task: {'id': task-1016889, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.062064] env[63345]: DEBUG nova.compute.manager [None req-77605977-b262-459e-a8bb-b5d33eecfcbe tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 34e0234c-36c4-4878-979b-46f045bd1785] Start destroying the instance on the hypervisor. {{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 728.062361] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-77605977-b262-459e-a8bb-b5d33eecfcbe tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 34e0234c-36c4-4878-979b-46f045bd1785] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 728.062705] env[63345]: DEBUG oslo_vmware.api [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1016888, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.483718} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.063539] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69fc56ec-88bd-4c8e-adcb-97f9ca62e308 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.066747] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 85fb1ecd-4ca3-401d-a87a-131f0b275506/85fb1ecd-4ca3-401d-a87a-131f0b275506.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 728.067022] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 728.067333] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b9614391-9ac9-401d-95b3-c85c8ec4b6ef {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.075720] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-77605977-b262-459e-a8bb-b5d33eecfcbe tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 34e0234c-36c4-4878-979b-46f045bd1785] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 728.075720] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with 
opID=oslo.vmware-15c49ede-9d34-4207-9ea6-927a07f21de4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.077846] env[63345]: DEBUG oslo_vmware.api [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Waiting for the task: (returnval){ [ 728.077846] env[63345]: value = "task-1016890" [ 728.077846] env[63345]: _type = "Task" [ 728.077846] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.078501] env[63345]: DEBUG oslo_concurrency.lockutils [req-3f027fa4-59cf-4ed7-ac14-3100e71db818 req-c885119e-2fcd-4d5d-9ec7-b8fa6b7fe774 service nova] Releasing lock "refresh_cache-3101726f-5b14-417e-bcf8-390ce1f9b467" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 728.085632] env[63345]: DEBUG oslo_vmware.api [None req-77605977-b262-459e-a8bb-b5d33eecfcbe tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Waiting for the task: (returnval){ [ 728.085632] env[63345]: value = "task-1016891" [ 728.085632] env[63345]: _type = "Task" [ 728.085632] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.090643] env[63345]: DEBUG oslo_vmware.api [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1016890, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.097303] env[63345]: DEBUG oslo_vmware.api [None req-77605977-b262-459e-a8bb-b5d33eecfcbe tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Task: {'id': task-1016891, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.154846] env[63345]: DEBUG nova.network.neutron [None req-d0b1cd44-5d00-4371-827d-17b8856bf22c tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Successfully created port: 9bf872ef-9bac-41a4-b3eb-319d2572fee6 {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 728.198152] env[63345]: DEBUG oslo_vmware.api [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52f30b2e-cd8e-a832-6f4e-5aabeb329189, 'name': SearchDatastore_Task, 'duration_secs': 0.009557} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.201629] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ab1d6efc-02f1-4489-a536-51bce3acdff6 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.203457] env[63345]: DEBUG nova.network.neutron [req-2b62f7df-a02c-4047-b297-51e7dde69fb5 req-7d90650b-7958-4b29-bb24-fb999ba3f889 service nova] [instance: 64fcf837-1d9d-41b1-a2a1-3c16362932cf] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 728.208905] env[63345]: DEBUG oslo_vmware.api [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Waiting for the task: (returnval){ [ 728.208905] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]5251ff15-7eb7-dbbc-64c9-9b64952f7fde" [ 728.208905] env[63345]: _type = "Task" [ 728.208905] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.220602] env[63345]: DEBUG oslo_vmware.api [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5251ff15-7eb7-dbbc-64c9-9b64952f7fde, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.283611] env[63345]: DEBUG nova.compute.manager [None req-d0b1cd44-5d00-4371-827d-17b8856bf22c tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Start building block device mappings for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 728.351191] env[63345]: DEBUG nova.network.neutron [req-2b62f7df-a02c-4047-b297-51e7dde69fb5 req-7d90650b-7958-4b29-bb24-fb999ba3f889 service nova] [instance: 64fcf837-1d9d-41b1-a2a1-3c16362932cf] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 728.504282] env[63345]: DEBUG oslo_vmware.api [None req-46d06c0e-8898-48ae-8f40-b29c17280d4b tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Task: {'id': task-1016889, 'name': PowerOffVM_Task, 'duration_secs': 0.202374} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.506631] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-46d06c0e-8898-48ae-8f40-b29c17280d4b tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 728.506818] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-46d06c0e-8898-48ae-8f40-b29c17280d4b tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 728.507798] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a254be77-2af7-4c9b-89ea-a1fb55edbd8d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.583059] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-46d06c0e-8898-48ae-8f40-b29c17280d4b tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 728.583294] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-46d06c0e-8898-48ae-8f40-b29c17280d4b tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 728.583474] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-46d06c0e-8898-48ae-8f40-b29c17280d4b tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Deleting the datastore file [datastore2] 78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 728.584066] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a5aa2b61-b72f-4da6-9eda-3d8561d598db {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.588806] env[63345]: DEBUG oslo_vmware.api [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1016890, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071571} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.593874] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 728.594810] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52f2b447-fe2a-4f17-a76a-c360a3f82c53 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.598497] env[63345]: DEBUG oslo_vmware.api [None req-46d06c0e-8898-48ae-8f40-b29c17280d4b tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Waiting for the task: (returnval){ [ 728.598497] env[63345]: value = "task-1016893" [ 728.598497] env[63345]: _type = "Task" [ 728.598497] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.621258] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] Reconfiguring VM instance instance-0000002e to attach disk [datastore2] 85fb1ecd-4ca3-401d-a87a-131f0b275506/85fb1ecd-4ca3-401d-a87a-131f0b275506.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 728.621371] env[63345]: DEBUG oslo_vmware.api [None req-77605977-b262-459e-a8bb-b5d33eecfcbe tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Task: {'id': task-1016891, 'name': PowerOffVM_Task, 'duration_secs': 0.188107} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.624028] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1104da1e-48ed-4952-8088-06eee2330767 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.638135] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-77605977-b262-459e-a8bb-b5d33eecfcbe tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 34e0234c-36c4-4878-979b-46f045bd1785] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 728.638324] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-77605977-b262-459e-a8bb-b5d33eecfcbe tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 34e0234c-36c4-4878-979b-46f045bd1785] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 728.641794] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cf69af90-843e-4904-b17a-df60886f4c23 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.643872] env[63345]: DEBUG oslo_vmware.api [None req-46d06c0e-8898-48ae-8f40-b29c17280d4b tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Task: {'id': task-1016893, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.649671] env[63345]: DEBUG oslo_vmware.api [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Waiting for the task: (returnval){ [ 728.649671] env[63345]: value = "task-1016895" [ 728.649671] env[63345]: _type = "Task" [ 728.649671] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.661033] env[63345]: DEBUG oslo_vmware.api [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1016895, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.708914] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-77605977-b262-459e-a8bb-b5d33eecfcbe tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 34e0234c-36c4-4878-979b-46f045bd1785] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 728.709192] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-77605977-b262-459e-a8bb-b5d33eecfcbe tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 34e0234c-36c4-4878-979b-46f045bd1785] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 728.709448] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-77605977-b262-459e-a8bb-b5d33eecfcbe tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Deleting the datastore file [datastore2] 34e0234c-36c4-4878-979b-46f045bd1785 {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 728.709769] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-41a73e5d-641e-47df-8b42-1540d4fc5c4c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.724632] env[63345]: DEBUG oslo_vmware.api [None req-77605977-b262-459e-a8bb-b5d33eecfcbe tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Waiting for the task: (returnval){ [ 728.724632] env[63345]: value = "task-1016896" [ 728.724632] env[63345]: _type = "Task" [ 728.724632] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.724632] env[63345]: DEBUG oslo_vmware.api [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5251ff15-7eb7-dbbc-64c9-9b64952f7fde, 'name': SearchDatastore_Task, 'duration_secs': 0.018689} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.724632] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 728.724962] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 3101726f-5b14-417e-bcf8-390ce1f9b467/3101726f-5b14-417e-bcf8-390ce1f9b467.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 728.729304] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a102ebc6-0584-4874-8a54-8e2c872bf905 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.736991] env[63345]: DEBUG oslo_vmware.api [None req-77605977-b262-459e-a8bb-b5d33eecfcbe tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Task: {'id': task-1016896, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.740695] env[63345]: DEBUG oslo_vmware.api [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Waiting for the task: (returnval){ [ 728.740695] env[63345]: value = "task-1016897" [ 728.740695] env[63345]: _type = "Task" [ 728.740695] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.748833] env[63345]: DEBUG oslo_vmware.api [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': task-1016897, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.787511] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49c123d1-4e9a-4640-8d3c-65701fe67569 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.795473] env[63345]: INFO nova.virt.block_device [None req-d0b1cd44-5d00-4371-827d-17b8856bf22c tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Booting with volume fcaa69f8-c32a-43e4-8f84-b58e01f1b245 at /dev/sda [ 728.798870] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c3ab981-d947-4f3a-b72b-a1ef4aa495d3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.832679] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7696b93-ac4e-4c86-a7df-14899e3e158d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.837755] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7775b8ff-b5be-463e-93a9-f67e92b7ef9e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.846224] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4f551ea-c692-4cb7-9315-964d34584449 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.852675] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42b6b6cd-21d7-4ed0-8593-dd6829d32659 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.863531] env[63345]: DEBUG oslo_concurrency.lockutils [req-2b62f7df-a02c-4047-b297-51e7dde69fb5 req-7d90650b-7958-4b29-bb24-fb999ba3f889 service nova] Releasing lock "refresh_cache-64fcf837-1d9d-41b1-a2a1-3c16362932cf" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 728.863977] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Acquired lock "refresh_cache-64fcf837-1d9d-41b1-a2a1-3c16362932cf" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 728.864180] env[63345]: DEBUG nova.network.neutron [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] [instance: 64fcf837-1d9d-41b1-a2a1-3c16362932cf] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 728.874812] env[63345]: DEBUG nova.compute.provider_tree [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 728.887522] env[63345]: DEBUG 
nova.scheduler.client.report [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 728.890444] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e60779a9-6578-4600-9e03-331e1963df37 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.900178] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bbd6fce-f6b6-48d1-b03d-8958c3fb9e88 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.934019] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-398bff5b-6d12-40e5-b70b-29087ceb05e8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.941014] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afb1de06-5997-4193-adea-c2babdba96c2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.956120] env[63345]: DEBUG nova.virt.block_device [None req-d0b1cd44-5d00-4371-827d-17b8856bf22c tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Updating existing volume attachment record: 0e903d04-6f39-4491-b35f-9bcf32362a62 {{(pid=63345) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 729.109503] env[63345]: DEBUG oslo_vmware.api [None req-46d06c0e-8898-48ae-8f40-b29c17280d4b tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Task: {'id': task-1016893, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.143213} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.109775] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-46d06c0e-8898-48ae-8f40-b29c17280d4b tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 729.109969] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-46d06c0e-8898-48ae-8f40-b29c17280d4b tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 729.110166] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-46d06c0e-8898-48ae-8f40-b29c17280d4b tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 729.110367] env[63345]: INFO nova.compute.manager [None req-46d06c0e-8898-48ae-8f40-b29c17280d4b tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c] Took 1.14 seconds to destroy the instance on the hypervisor. [ 729.110616] env[63345]: DEBUG oslo.service.loopingcall [None req-46d06c0e-8898-48ae-8f40-b29c17280d4b tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 729.110813] env[63345]: DEBUG nova.compute.manager [-] [instance: 78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 729.110907] env[63345]: DEBUG nova.network.neutron [-] [instance: 78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 729.161238] env[63345]: DEBUG oslo_vmware.api [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1016895, 'name': ReconfigVM_Task, 'duration_secs': 0.314412} completed successfully.
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.161570] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] Reconfigured VM instance instance-0000002e to attach disk [datastore2] 85fb1ecd-4ca3-401d-a87a-131f0b275506/85fb1ecd-4ca3-401d-a87a-131f0b275506.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 729.162228] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-64adbae9-1725-4506-86cc-2c11ac235dee {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.168927] env[63345]: DEBUG oslo_vmware.api [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Waiting for the task: (returnval){ [ 729.168927] env[63345]: value = "task-1016898" [ 729.168927] env[63345]: _type = "Task" [ 729.168927] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.177318] env[63345]: DEBUG oslo_vmware.api [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1016898, 'name': Rename_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.233638] env[63345]: DEBUG oslo_vmware.api [None req-77605977-b262-459e-a8bb-b5d33eecfcbe tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Task: {'id': task-1016896, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.144632} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.233933] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-77605977-b262-459e-a8bb-b5d33eecfcbe tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 729.234131] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-77605977-b262-459e-a8bb-b5d33eecfcbe tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 34e0234c-36c4-4878-979b-46f045bd1785] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 729.234327] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-77605977-b262-459e-a8bb-b5d33eecfcbe tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 34e0234c-36c4-4878-979b-46f045bd1785] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 729.234492] env[63345]: INFO nova.compute.manager [None req-77605977-b262-459e-a8bb-b5d33eecfcbe tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [instance: 34e0234c-36c4-4878-979b-46f045bd1785] Took 1.17 seconds to destroy the instance on the hypervisor. 
[ 729.234727] env[63345]: DEBUG oslo.service.loopingcall [None req-77605977-b262-459e-a8bb-b5d33eecfcbe tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 729.234916] env[63345]: DEBUG nova.compute.manager [-] [instance: 34e0234c-36c4-4878-979b-46f045bd1785] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 729.235035] env[63345]: DEBUG nova.network.neutron [-] [instance: 34e0234c-36c4-4878-979b-46f045bd1785] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 729.250318] env[63345]: DEBUG oslo_vmware.api [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': task-1016897, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.501288} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.250586] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 3101726f-5b14-417e-bcf8-390ce1f9b467/3101726f-5b14-417e-bcf8-390ce1f9b467.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 729.250815] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 3101726f-5b14-417e-bcf8-390ce1f9b467] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 729.251124] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-024bc16d-8096-4f87-acb8-fc99fecfed14 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.257265] env[63345]: DEBUG oslo_vmware.api [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Waiting for the task: (returnval){ [ 729.257265] env[63345]: value = "task-1016899" [ 729.257265] env[63345]: _type = "Task" [ 729.257265] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.265105] env[63345]: DEBUG oslo_vmware.api [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': task-1016899, 'name': ExtendVirtualDisk_Task} progress is 0%.
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.393564] env[63345]: DEBUG oslo_concurrency.lockutils [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.129s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 729.394098] env[63345]: DEBUG nova.compute.manager [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: 4a59b565-571f-48ef-97bd-bed9853e2d8e] Start building networks asynchronously for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 729.398195] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b39a0d7f-3eb6-4262-995b-85516b6ff111 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.084s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 729.398440] env[63345]: DEBUG nova.objects.instance [None req-b39a0d7f-3eb6-4262-995b-85516b6ff111 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Lazy-loading 'resources' on Instance uuid c07c7f5d-a674-458f-8253-1bc2d61be6c1 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 729.419041] env[63345]: DEBUG nova.network.neutron [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] [instance: 64fcf837-1d9d-41b1-a2a1-3c16362932cf] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 729.642043] env[63345]: DEBUG nova.network.neutron [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] [instance: 64fcf837-1d9d-41b1-a2a1-3c16362932cf] Updating instance_info_cache with network_info: [{"id": "6a41176d-7c10-4226-8332-eff6eea91574", "address": "fa:16:3e:52:23:1e", "network": {"id": "dcdb27bf-e425-4c7c-97ef-b0711fe802d2", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-826299688-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9f769fb06b3c4d32a9aa2b99943344ab", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8459aaf-d6a8-46fb-ad14-464ac3104695", "external-id": "nsx-vlan-transportzone-46", "segmentation_id": 46, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6a41176d-7c", "ovs_interfaceid": "6a41176d-7c10-4226-8332-eff6eea91574", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 729.683426] env[63345]: DEBUG oslo_vmware.api [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1016898, 'name': Rename_Task, 'duration_secs': 0.156477} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.683693] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 729.683926] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ad2618ff-11c1-407d-b9d1-fac9e175857e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.692018] env[63345]: DEBUG oslo_vmware.api [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Waiting for the task: (returnval){ [ 729.692018] env[63345]: value = "task-1016900" [ 729.692018] env[63345]: _type = "Task" [ 729.692018] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.699780] env[63345]: DEBUG oslo_vmware.api [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1016900, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.768815] env[63345]: DEBUG oslo_vmware.api [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': task-1016899, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067458} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.769577] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 3101726f-5b14-417e-bcf8-390ce1f9b467] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 729.770537] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4627034-13c1-4c57-a8ad-3245fb9561bd {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.798356] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 3101726f-5b14-417e-bcf8-390ce1f9b467] Reconfiguring VM instance instance-0000002f to attach disk [datastore2] 3101726f-5b14-417e-bcf8-390ce1f9b467/3101726f-5b14-417e-bcf8-390ce1f9b467.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 729.798688] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d241c17c-83e8-43fe-91cc-2ebe58bc5300 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.817325] env[63345]: DEBUG nova.network.neutron [None req-d0b1cd44-5d00-4371-827d-17b8856bf22c tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Successfully updated port: 9bf872ef-9bac-41a4-b3eb-319d2572fee6 {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 729.828287] env[63345]: DEBUG oslo_vmware.api [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Waiting for the task: (returnval){ [ 729.828287] env[63345]: value = "task-1016901" [ 729.828287] env[63345]: _type = "Task" [ 729.828287] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.839400] env[63345]: DEBUG oslo_vmware.api [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': task-1016901, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.882469] env[63345]: DEBUG nova.compute.manager [req-e761768e-05f4-4938-9626-cad23b10cacc req-39a69bb1-d8ad-4063-a1eb-457a3849eeba service nova] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Received event network-vif-plugged-9bf872ef-9bac-41a4-b3eb-319d2572fee6 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 729.882553] env[63345]: DEBUG oslo_concurrency.lockutils [req-e761768e-05f4-4938-9626-cad23b10cacc req-39a69bb1-d8ad-4063-a1eb-457a3849eeba service nova] Acquiring lock "3b0d115d-dad5-4881-a0e0-b98f555da533-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 729.882775] env[63345]: DEBUG oslo_concurrency.lockutils [req-e761768e-05f4-4938-9626-cad23b10cacc req-39a69bb1-d8ad-4063-a1eb-457a3849eeba service nova] Lock "3b0d115d-dad5-4881-a0e0-b98f555da533-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 729.882951] env[63345]: DEBUG oslo_concurrency.lockutils [req-e761768e-05f4-4938-9626-cad23b10cacc req-39a69bb1-d8ad-4063-a1eb-457a3849eeba service nova] Lock "3b0d115d-dad5-4881-a0e0-b98f555da533-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 729.883137] env[63345]: DEBUG nova.compute.manager [req-e761768e-05f4-4938-9626-cad23b10cacc req-39a69bb1-d8ad-4063-a1eb-457a3849eeba service nova] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] No waiting events found dispatching network-vif-plugged-9bf872ef-9bac-41a4-b3eb-319d2572fee6 {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 729.883300] env[63345]: WARNING nova.compute.manager [req-e761768e-05f4-4938-9626-cad23b10cacc req-39a69bb1-d8ad-4063-a1eb-457a3849eeba service nova] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Received unexpected event network-vif-plugged-9bf872ef-9bac-41a4-b3eb-319d2572fee6 for instance with vm_state building and task_state block_device_mapping.
[ 729.899755] env[63345]: DEBUG nova.compute.utils [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 729.901786] env[63345]: DEBUG nova.compute.manager [req-33b66626-63bd-442b-8845-364ba215e2e0 req-a33a7b07-d7d3-4447-b867-36de4434b1ec service nova] [instance: 78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c] Received event network-vif-deleted-613b8631-b2ef-4da4-8e79-67f2fda08ab5 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 729.901974] env[63345]: INFO nova.compute.manager [req-33b66626-63bd-442b-8845-364ba215e2e0 req-a33a7b07-d7d3-4447-b867-36de4434b1ec service nova] [instance: 78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c] Neutron deleted interface 613b8631-b2ef-4da4-8e79-67f2fda08ab5; detaching it from the instance and deleting it from the info cache [ 729.902183] env[63345]: DEBUG nova.network.neutron [req-33b66626-63bd-442b-8845-364ba215e2e0 req-a33a7b07-d7d3-4447-b867-36de4434b1ec service nova] [instance: 78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 729.906358] env[63345]: DEBUG nova.compute.manager [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: 4a59b565-571f-48ef-97bd-bed9853e2d8e] Allocating IP information in the background. {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 729.906358] env[63345]: DEBUG nova.network.neutron [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: 4a59b565-571f-48ef-97bd-bed9853e2d8e] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 729.987968] env[63345]: DEBUG nova.policy [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd58b1b41d48e44788f4667bf686352de', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8f0343855b6147f38b0cb3f2c72330e0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 730.144344] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Releasing lock "refresh_cache-64fcf837-1d9d-41b1-a2a1-3c16362932cf" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 730.144683] env[63345]: DEBUG nova.compute.manager [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] [instance: 64fcf837-1d9d-41b1-a2a1-3c16362932cf] Instance network_info: |[{"id": "6a41176d-7c10-4226-8332-eff6eea91574", "address": "fa:16:3e:52:23:1e", "network": 
{"id": "dcdb27bf-e425-4c7c-97ef-b0711fe802d2", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-826299688-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9f769fb06b3c4d32a9aa2b99943344ab", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8459aaf-d6a8-46fb-ad14-464ac3104695", "external-id": "nsx-vlan-transportzone-46", "segmentation_id": 46, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6a41176d-7c", "ovs_interfaceid": "6a41176d-7c10-4226-8332-eff6eea91574", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 730.145487] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] [instance: 64fcf837-1d9d-41b1-a2a1-3c16362932cf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:52:23:1e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8459aaf-d6a8-46fb-ad14-464ac3104695', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6a41176d-7c10-4226-8332-eff6eea91574', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 730.154628] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Creating folder: Project (9f769fb06b3c4d32a9aa2b99943344ab). Parent ref: group-v225918. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 730.156119] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-86ae276f-3538-445a-acf1-60586b47e94e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.166216] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Created folder: Project (9f769fb06b3c4d32a9aa2b99943344ab) in parent group-v225918. [ 730.166415] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Creating folder: Instances. Parent ref: group-v225996. 
{{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 730.166653] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-eea3f39a-72e8-4f27-8b88-5991f6334226 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.175139] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Created folder: Instances in parent group-v225996. [ 730.175388] env[63345]: DEBUG oslo.service.loopingcall [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 730.175585] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 64fcf837-1d9d-41b1-a2a1-3c16362932cf] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 730.175792] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-39fcc453-aa80-4f24-bae3-2ea5d80bf83c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.203657] env[63345]: DEBUG oslo_vmware.api [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1016900, 'name': PowerOnVM_Task, 'duration_secs': 0.472552} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.205045] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 730.205384] env[63345]: INFO nova.compute.manager [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] Took 8.35 seconds to spawn the instance on the hypervisor. [ 730.205560] env[63345]: DEBUG nova.compute.manager [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 730.205823] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 730.205823] env[63345]: value = "task-1016904" [ 730.205823] env[63345]: _type = "Task" [ 730.205823] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.208775] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c06a572c-7de6-411c-a668-3aecf83193a5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.211809] env[63345]: DEBUG nova.network.neutron [-] [instance: 78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 730.228366] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016904, 'name': CreateVM_Task} progress is 6%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.327493] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d0b1cd44-5d00-4371-827d-17b8856bf22c tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] Acquiring lock "refresh_cache-3b0d115d-dad5-4881-a0e0-b98f555da533" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 730.327493] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d0b1cd44-5d00-4371-827d-17b8856bf22c tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] Acquired lock "refresh_cache-3b0d115d-dad5-4881-a0e0-b98f555da533" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 730.327493] env[63345]: DEBUG nova.network.neutron [None req-d0b1cd44-5d00-4371-827d-17b8856bf22c tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 730.340737] env[63345]: DEBUG oslo_vmware.api [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': task-1016901, 'name': ReconfigVM_Task, 'duration_secs': 0.293972} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.343480] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 3101726f-5b14-417e-bcf8-390ce1f9b467] Reconfigured VM instance instance-0000002f to attach disk [datastore2] 3101726f-5b14-417e-bcf8-390ce1f9b467/3101726f-5b14-417e-bcf8-390ce1f9b467.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 730.344780] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-26738ea9-3199-452f-80a9-2b2485a23a98 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.355024] env[63345]: DEBUG oslo_vmware.api [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Waiting for the task: (returnval){ [ 730.355024] env[63345]: value = "task-1016905" [ 730.355024] env[63345]: _type = "Task" [ 730.355024] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.358734] env[63345]: DEBUG nova.network.neutron [-] [instance: 34e0234c-36c4-4878-979b-46f045bd1785] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 730.368184] env[63345]: DEBUG oslo_vmware.api [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': task-1016905, 'name': Rename_Task} progress is 6%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.404367] env[63345]: DEBUG nova.compute.manager [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: 4a59b565-571f-48ef-97bd-bed9853e2d8e] Start building block device mappings for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 730.413343] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-99e4cb4b-f19e-4048-92e1-25f1bab8fe29 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.423810] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7feacff0-5470-452e-8211-68d99f73898d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.461658] env[63345]: DEBUG nova.compute.manager [req-33b66626-63bd-442b-8845-364ba215e2e0 req-a33a7b07-d7d3-4447-b867-36de4434b1ec service nova] [instance: 78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c] Detach interface failed, port_id=613b8631-b2ef-4da4-8e79-67f2fda08ab5, reason: Instance 78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c could not be found. 
{{(pid=63345) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 730.475998] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fff83b8-fd4b-47d7-833d-6e41b62c2085 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.483932] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1780bb1e-124e-45f1-867f-5f041fe3e34c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.487942] env[63345]: DEBUG nova.network.neutron [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: 4a59b565-571f-48ef-97bd-bed9853e2d8e] Successfully created port: 4ea770ff-4619-4df2-b09f-53b1fdc250e5 {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 730.516952] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56bcd850-7115-4127-b009-17c7b4e6eedd {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.524659] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-300bcdf7-f94f-4fcb-a673-8000bfce3edb {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.539779] env[63345]: DEBUG nova.compute.provider_tree [None req-b39a0d7f-3eb6-4262-995b-85516b6ff111 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 731.301268] env[63345]: INFO nova.compute.manager [-] [instance: 78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c] Took 2.19 seconds to deallocate network for instance. [ 731.308317] env[63345]: INFO nova.compute.manager [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] Took 38.76 seconds to build instance. [ 731.311338] env[63345]: INFO nova.compute.manager [-] [instance: 34e0234c-36c4-4878-979b-46f045bd1785] Took 2.08 seconds to deallocate network for instance. 
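The recurring 'Acquiring lock ... / Lock ... acquired ... waited 0.000s / ... "released" ... held N.NNNs' lines throughout this trace (tagged inner .../oslo_concurrency/lockutils.py) come from oslo.concurrency's lock wrapper, which Nova uses for per-instance, per-event, and resource-tracker critical sections. A minimal illustration of the primitive itself; the lock names below are copied from the log only as examples, and this is not Nova's actual code:

    # Illustrative sketch only; lock names mirror those seen in the log.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_usage():
        # Only one holder of "compute_resources" runs this body at a time;
        # the decorator's inner wrapper emits the acquired/waited/held
        # DEBUG lines seen above.
        ...

    # The same primitive is also available as a context manager, which logs
    # the plainer "Acquiring lock ... / Acquired lock ... / Releasing lock"
    # variant that appears for the refresh_cache-* locks:
    with lockutils.lock('refresh_cache-3b0d115d-dad5-4881-a0e0-b98f555da533'):
        ...  # e.g. rebuild the instance network info cache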
[ 731.315165] env[63345]: DEBUG nova.scheduler.client.report [None req-b39a0d7f-3eb6-4262-995b-85516b6ff111 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 731.318836] env[63345]: DEBUG nova.compute.manager [None req-d0b1cd44-5d00-4371-827d-17b8856bf22c tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Start spawning the instance on the hypervisor. {{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 731.322299] env[63345]: DEBUG nova.virt.hardware [None req-d0b1cd44-5d00-4371-827d-17b8856bf22c tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 731.322299] env[63345]: DEBUG nova.virt.hardware [None req-d0b1cd44-5d00-4371-827d-17b8856bf22c tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 731.322299] env[63345]: DEBUG nova.virt.hardware [None req-d0b1cd44-5d00-4371-827d-17b8856bf22c tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 731.322299] env[63345]: DEBUG nova.virt.hardware [None req-d0b1cd44-5d00-4371-827d-17b8856bf22c tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 731.322544] env[63345]: DEBUG nova.virt.hardware [None req-d0b1cd44-5d00-4371-827d-17b8856bf22c tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 731.322544] env[63345]: DEBUG nova.virt.hardware [None req-d0b1cd44-5d00-4371-827d-17b8856bf22c tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, 
cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 731.322544] env[63345]: DEBUG nova.virt.hardware [None req-d0b1cd44-5d00-4371-827d-17b8856bf22c tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 731.322544] env[63345]: DEBUG nova.virt.hardware [None req-d0b1cd44-5d00-4371-827d-17b8856bf22c tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 731.322544] env[63345]: DEBUG nova.virt.hardware [None req-d0b1cd44-5d00-4371-827d-17b8856bf22c tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 731.322711] env[63345]: DEBUG nova.virt.hardware [None req-d0b1cd44-5d00-4371-827d-17b8856bf22c tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 731.322711] env[63345]: DEBUG nova.virt.hardware [None req-d0b1cd44-5d00-4371-827d-17b8856bf22c tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 731.336011] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb4c412b-892b-47de-aa95-cfe2f9f76de4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.351756] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016904, 'name': CreateVM_Task, 'duration_secs': 0.35952} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.352124] env[63345]: DEBUG oslo_vmware.api [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': task-1016905, 'name': Rename_Task, 'duration_secs': 0.205767} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.352292] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 64fcf837-1d9d-41b1-a2a1-3c16362932cf] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 731.353527] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c03003cf-7d11-4ebb-ba7d-680b08384de2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.357934] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 3101726f-5b14-417e-bcf8-390ce1f9b467] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 731.358430] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 731.358592] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 731.358891] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 731.359504] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-02239613-ebed-4423-bddb-df0267b0fece {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.361278] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ab7bc8fd-c7cd-4ce1-a920-b50fe2de1ecc {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.366231] env[63345]: DEBUG oslo_vmware.api [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Waiting for the task: (returnval){ [ 731.366231] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52612315-54ff-d191-549d-264a16e3727d" [ 731.366231] env[63345]: _type = "Task" [ 731.366231] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.384939] env[63345]: DEBUG oslo_vmware.api [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Waiting for the task: (returnval){ [ 731.384939] env[63345]: value = "task-1016906" [ 731.384939] env[63345]: _type = "Task" [ 731.384939] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.385740] env[63345]: DEBUG nova.network.neutron [None req-d0b1cd44-5d00-4371-827d-17b8856bf22c tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 731.395197] env[63345]: DEBUG oslo_vmware.api [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52612315-54ff-d191-549d-264a16e3727d, 'name': SearchDatastore_Task, 'duration_secs': 0.009767} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.395393] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 731.395621] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] [instance: 64fcf837-1d9d-41b1-a2a1-3c16362932cf] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 731.395842] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 731.396000] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 731.396206] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 731.396450] env[63345]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.MakeDirectory with opID=oslo.vmware-142ae64f-8999-481e-bfe7-cc244d16f02d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.401426] env[63345]: DEBUG oslo_vmware.api [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': task-1016906, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.410534] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 731.411408] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 731.411556] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8b4b784a-5620-48cd-ad93-278e72e446f1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.417146] env[63345]: DEBUG oslo_vmware.api [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Waiting for the task: (returnval){ [ 731.417146] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52d02f49-74ab-7fea-2718-3e9af11861fd" [ 731.417146] env[63345]: _type = "Task" [ 731.417146] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.425601] env[63345]: DEBUG oslo_vmware.api [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52d02f49-74ab-7fea-2718-3e9af11861fd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.566026] env[63345]: DEBUG nova.network.neutron [None req-d0b1cd44-5d00-4371-827d-17b8856bf22c tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Updating instance_info_cache with network_info: [{"id": "9bf872ef-9bac-41a4-b3eb-319d2572fee6", "address": "fa:16:3e:e6:33:e6", "network": {"id": "a44daa6a-6666-4277-911d-306e7f499492", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1563048768-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e6bcd39225b4bc5b1ac79111b46dd9e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b399c74-1411-408a-b4cd-84e268ae83fe", "external-id": "nsx-vlan-transportzone-486", "segmentation_id": 486, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9bf872ef-9b", "ovs_interfaceid": "9bf872ef-9bac-41a4-b3eb-319d2572fee6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 731.810226] env[63345]: DEBUG oslo_concurrency.lockutils [None req-88b6e3ef-b83f-4933-977c-3790535be89e tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Lock "85fb1ecd-4ca3-401d-a87a-131f0b275506" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 130.663s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 731.826514] env[63345]: DEBUG nova.compute.manager [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: 4a59b565-571f-48ef-97bd-bed9853e2d8e] Start spawning the instance on the hypervisor. 
{{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 731.826514] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b39a0d7f-3eb6-4262-995b-85516b6ff111 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.428s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 731.828122] env[63345]: DEBUG oslo_concurrency.lockutils [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.717s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 731.833166] env[63345]: INFO nova.compute.claims [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] [instance: d3e99100-f13f-4019-9b5a-adaa65dacc5f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 731.846210] env[63345]: DEBUG oslo_concurrency.lockutils [None req-46d06c0e-8898-48ae-8f40-b29c17280d4b tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 731.846210] env[63345]: DEBUG oslo_concurrency.lockutils [None req-77605977-b262-459e-a8bb-b5d33eecfcbe tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 731.874246] env[63345]: DEBUG nova.virt.hardware [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 731.874506] env[63345]: DEBUG nova.virt.hardware [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 731.874658] env[63345]: DEBUG nova.virt.hardware [None 
req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 731.874835] env[63345]: DEBUG nova.virt.hardware [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 731.874970] env[63345]: DEBUG nova.virt.hardware [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 731.875125] env[63345]: DEBUG nova.virt.hardware [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 731.875334] env[63345]: DEBUG nova.virt.hardware [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 731.875555] env[63345]: DEBUG nova.virt.hardware [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 731.875731] env[63345]: DEBUG nova.virt.hardware [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 731.875893] env[63345]: DEBUG nova.virt.hardware [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 731.876101] env[63345]: DEBUG nova.virt.hardware [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 731.877294] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aae67050-a5c3-4400-90f9-72927cf99f95 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.885883] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b22e906-e6c1-47f0-b771-8eb1cbc0985e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.912085] env[63345]: DEBUG 
oslo_vmware.api [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': task-1016906, 'name': PowerOnVM_Task, 'duration_secs': 0.446101} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.913343] env[63345]: INFO nova.scheduler.client.report [None req-b39a0d7f-3eb6-4262-995b-85516b6ff111 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Deleted allocations for instance c07c7f5d-a674-458f-8253-1bc2d61be6c1 [ 731.914675] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 3101726f-5b14-417e-bcf8-390ce1f9b467] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 731.915079] env[63345]: INFO nova.compute.manager [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 3101726f-5b14-417e-bcf8-390ce1f9b467] Took 7.94 seconds to spawn the instance on the hypervisor. [ 731.915432] env[63345]: DEBUG nova.compute.manager [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 3101726f-5b14-417e-bcf8-390ce1f9b467] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 731.919021] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35d5707a-d059-463e-af01-9018afb443bd {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.934385] env[63345]: DEBUG oslo_vmware.api [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52d02f49-74ab-7fea-2718-3e9af11861fd, 'name': SearchDatastore_Task, 'duration_secs': 0.00881} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.940730] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-37913bb9-9133-4957-941b-4b09780682fa {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.948291] env[63345]: DEBUG oslo_vmware.api [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Waiting for the task: (returnval){ [ 731.948291] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]526531ae-e04b-81cf-d941-8e86be478821" [ 731.948291] env[63345]: _type = "Task" [ 731.948291] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.972495] env[63345]: DEBUG oslo_vmware.api [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]526531ae-e04b-81cf-d941-8e86be478821, 'name': SearchDatastore_Task, 'duration_secs': 0.010564} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.972495] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 731.972495] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 64fcf837-1d9d-41b1-a2a1-3c16362932cf/64fcf837-1d9d-41b1-a2a1-3c16362932cf.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 731.972495] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2c49adea-a84e-49d0-b364-992de858286b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.974403] env[63345]: DEBUG oslo_vmware.api [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Waiting for the task: (returnval){ [ 731.974403] env[63345]: value = "task-1016907" [ 731.974403] env[63345]: _type = "Task" [ 731.974403] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.986997] env[63345]: DEBUG oslo_vmware.api [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Task: {'id': task-1016907, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.068894] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d0b1cd44-5d00-4371-827d-17b8856bf22c tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] Releasing lock "refresh_cache-3b0d115d-dad5-4881-a0e0-b98f555da533" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 732.069255] env[63345]: DEBUG nova.compute.manager [None req-d0b1cd44-5d00-4371-827d-17b8856bf22c tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Instance network_info: |[{"id": "9bf872ef-9bac-41a4-b3eb-319d2572fee6", "address": "fa:16:3e:e6:33:e6", "network": {"id": "a44daa6a-6666-4277-911d-306e7f499492", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1563048768-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e6bcd39225b4bc5b1ac79111b46dd9e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b399c74-1411-408a-b4cd-84e268ae83fe", "external-id": "nsx-vlan-transportzone-486", "segmentation_id": 486, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9bf872ef-9b", "ovs_interfaceid": "9bf872ef-9bac-41a4-b3eb-319d2572fee6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 732.069762] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-d0b1cd44-5d00-4371-827d-17b8856bf22c tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e6:33:e6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6b399c74-1411-408a-b4cd-84e268ae83fe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9bf872ef-9bac-41a4-b3eb-319d2572fee6', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 732.079396] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0b1cd44-5d00-4371-827d-17b8856bf22c tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] Creating folder: Project (4e6bcd39225b4bc5b1ac79111b46dd9e). Parent ref: group-v225918. 
{{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 732.080983] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-65abefff-2280-453c-80b2-fc025d220585 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.084406] env[63345]: DEBUG nova.compute.manager [req-0adf2eb8-c9a8-4e7d-bf58-1cffb1c41599 req-9ae56c45-b3ca-42b5-ae4f-c5691a6631a7 service nova] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Received event network-changed-9bf872ef-9bac-41a4-b3eb-319d2572fee6 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 732.084665] env[63345]: DEBUG nova.compute.manager [req-0adf2eb8-c9a8-4e7d-bf58-1cffb1c41599 req-9ae56c45-b3ca-42b5-ae4f-c5691a6631a7 service nova] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Refreshing instance network info cache due to event network-changed-9bf872ef-9bac-41a4-b3eb-319d2572fee6. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 732.084962] env[63345]: DEBUG oslo_concurrency.lockutils [req-0adf2eb8-c9a8-4e7d-bf58-1cffb1c41599 req-9ae56c45-b3ca-42b5-ae4f-c5691a6631a7 service nova] Acquiring lock "refresh_cache-3b0d115d-dad5-4881-a0e0-b98f555da533" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 732.085240] env[63345]: DEBUG oslo_concurrency.lockutils [req-0adf2eb8-c9a8-4e7d-bf58-1cffb1c41599 req-9ae56c45-b3ca-42b5-ae4f-c5691a6631a7 service nova] Acquired lock "refresh_cache-3b0d115d-dad5-4881-a0e0-b98f555da533" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 732.085513] env[63345]: DEBUG nova.network.neutron [req-0adf2eb8-c9a8-4e7d-bf58-1cffb1c41599 req-9ae56c45-b3ca-42b5-ae4f-c5691a6631a7 service nova] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Refreshing network info cache for port 9bf872ef-9bac-41a4-b3eb-319d2572fee6 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 732.097566] env[63345]: DEBUG nova.compute.manager [req-40e6e6c5-521e-458f-8484-74d3cf371c5d req-089f86d4-a5f4-4f80-a77b-05f1a1782ba9 service nova] [instance: 34e0234c-36c4-4878-979b-46f045bd1785] Received event network-vif-deleted-11bc09f4-1dfe-4b4b-8647-e126d27ae4b4 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 732.100403] env[63345]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 732.100403] env[63345]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=63345) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 732.100403] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0b1cd44-5d00-4371-827d-17b8856bf22c tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] Folder already exists: Project (4e6bcd39225b4bc5b1ac79111b46dd9e). Parent ref: group-v225918. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 732.100547] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0b1cd44-5d00-4371-827d-17b8856bf22c tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] Creating folder: Instances. Parent ref: group-v225934. 
{{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 732.100782] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1082d7ce-7090-40ba-9c5f-00f99dddea8b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.111791] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-d0b1cd44-5d00-4371-827d-17b8856bf22c tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] Created folder: Instances in parent group-v225934. [ 732.112051] env[63345]: DEBUG oslo.service.loopingcall [None req-d0b1cd44-5d00-4371-827d-17b8856bf22c tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 732.112245] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 732.112456] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fc94113e-9aea-4985-8918-45c557e89a9a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.134220] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 732.134220] env[63345]: value = "task-1016910" [ 732.134220] env[63345]: _type = "Task" [ 732.134220] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.146570] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016910, 'name': CreateVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.313303] env[63345]: DEBUG nova.compute.manager [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 732.432237] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b39a0d7f-3eb6-4262-995b-85516b6ff111 tempest-ServerDiagnosticsNegativeTest-2074467023 tempest-ServerDiagnosticsNegativeTest-2074467023-project-member] Lock "c07c7f5d-a674-458f-8253-1bc2d61be6c1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.740s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 732.450150] env[63345]: INFO nova.compute.manager [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 3101726f-5b14-417e-bcf8-390ce1f9b467] Took 32.03 seconds to build instance. [ 732.484950] env[63345]: DEBUG oslo_vmware.api [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Task: {'id': task-1016907, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.458253} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.485227] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 64fcf837-1d9d-41b1-a2a1-3c16362932cf/64fcf837-1d9d-41b1-a2a1-3c16362932cf.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 732.486028] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] [instance: 64fcf837-1d9d-41b1-a2a1-3c16362932cf] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 732.486335] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7c4f91ab-dfe6-4a3e-91e4-e82a30058cc6 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.493091] env[63345]: DEBUG oslo_vmware.api [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Waiting for the task: (returnval){ [ 732.493091] env[63345]: value = "task-1016911" [ 732.493091] env[63345]: _type = "Task" [ 732.493091] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.501840] env[63345]: DEBUG oslo_vmware.api [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Task: {'id': task-1016911, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.644018] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016910, 'name': CreateVM_Task} progress is 99%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.809023] env[63345]: DEBUG nova.network.neutron [req-0adf2eb8-c9a8-4e7d-bf58-1cffb1c41599 req-9ae56c45-b3ca-42b5-ae4f-c5691a6631a7 service nova] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Updated VIF entry in instance network info cache for port 9bf872ef-9bac-41a4-b3eb-319d2572fee6. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 732.812151] env[63345]: DEBUG nova.network.neutron [req-0adf2eb8-c9a8-4e7d-bf58-1cffb1c41599 req-9ae56c45-b3ca-42b5-ae4f-c5691a6631a7 service nova] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Updating instance_info_cache with network_info: [{"id": "9bf872ef-9bac-41a4-b3eb-319d2572fee6", "address": "fa:16:3e:e6:33:e6", "network": {"id": "a44daa6a-6666-4277-911d-306e7f499492", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1563048768-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e6bcd39225b4bc5b1ac79111b46dd9e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b399c74-1411-408a-b4cd-84e268ae83fe", "external-id": "nsx-vlan-transportzone-486", "segmentation_id": 486, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9bf872ef-9b", "ovs_interfaceid": "9bf872ef-9bac-41a4-b3eb-319d2572fee6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 732.841317] env[63345]: DEBUG oslo_concurrency.lockutils [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 732.915824] env[63345]: DEBUG nova.network.neutron [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: 4a59b565-571f-48ef-97bd-bed9853e2d8e] Successfully updated port: 4ea770ff-4619-4df2-b09f-53b1fdc250e5 {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 732.952210] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dd4fa8bd-2b95-4066-9468-836cc4f4bd1e tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Lock "3101726f-5b14-417e-bcf8-390ce1f9b467" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 127.203s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 733.008514] env[63345]: DEBUG oslo_vmware.api [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Task: {'id': task-1016911, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.211476} completed successfully. 
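The instance_info_cache update above carries the full Neutron network_info as a list of VIF dicts. A small sketch, assuming the structure shown verbatim in the log (trimmed to the keys used here), of pulling the fixed IPs, MAC and tap device name out of one such entry.

# Sketch: reading a network_info entry shaped like the cache update above.
vif = {
    "id": "9bf872ef-9bac-41a4-b3eb-319d2572fee6",
    "address": "fa:16:3e:e6:33:e6",
    "devname": "tap9bf872ef-9b",
    "ovs_interfaceid": "9bf872ef-9bac-41a4-b3eb-319d2572fee6",
    "network": {
        "bridge": "br-int",
        "subnets": [
            {
                "cidr": "192.168.128.0/28",
                "gateway": {"address": "192.168.128.1"},
                "ips": [{"address": "192.168.128.14", "type": "fixed"}],
            }
        ],
    },
}

fixed_ips = [
    ip["address"]
    for subnet in vif["network"]["subnets"]
    for ip in subnet["ips"]
    if ip["type"] == "fixed"
]
# -> tap9bf872ef-9b fa:16:3e:e6:33:e6 ['192.168.128.14']
print(vif["devname"], vif["address"], fixed_ips)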
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.008824] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] [instance: 64fcf837-1d9d-41b1-a2a1-3c16362932cf] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 733.009757] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2343322c-e5d1-4835-95b8-567b7e6f9b30 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.038764] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] [instance: 64fcf837-1d9d-41b1-a2a1-3c16362932cf] Reconfiguring VM instance instance-00000030 to attach disk [datastore2] 64fcf837-1d9d-41b1-a2a1-3c16362932cf/64fcf837-1d9d-41b1-a2a1-3c16362932cf.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 733.041851] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-30d4a19e-fd7b-47c0-8b58-d25a44cc62b5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.062255] env[63345]: DEBUG oslo_vmware.api [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Waiting for the task: (returnval){ [ 733.062255] env[63345]: value = "task-1016912" [ 733.062255] env[63345]: _type = "Task" [ 733.062255] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.074151] env[63345]: DEBUG oslo_vmware.api [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Task: {'id': task-1016912, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.145356] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016910, 'name': CreateVM_Task, 'duration_secs': 0.548165} completed successfully. 
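CreateVM_Task above is polled from 0% through 99% until it reports "completed successfully". A generic polling-loop sketch with a fixed interval and a hypothetical get_task_info callable standing in for the vSphere task query; it shows the shape of the loop the repeated "progress is N%" lines reflect, not the oslo.vmware implementation.

import time

class TaskFailed(Exception):
    pass

def wait_for_task(get_task_info, interval=0.5, timeout=300):
    """Poll a task until it leaves the running states.

    get_task_info is a stand-in callable returning an object with
    state ('queued'/'running'/'success'/'error'), progress and result.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()
        if info.state == "success":
            return info.result
        if info.state == "error":
            raise TaskFailed(getattr(info, "error", "task failed"))
        # Matches the "progress is N%" lines emitted on every poll.
        print(f"progress is {info.progress}%")
        time.sleep(interval)
    raise TimeoutError("task did not complete in time")


# Example with a fake task that completes on the third poll (0% -> 99% -> done):
class _FakeInfo:
    def __init__(self, state, progress=0, result=None):
        self.state, self.progress, self.result = state, progress, result

_states = iter([_FakeInfo("running", 0), _FakeInfo("running", 99),
                _FakeInfo("success", 100, "vm-123")])
print(wait_for_task(lambda: next(_states), interval=0.01))  # -> vm-123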
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.147729] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 733.148893] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-d0b1cd44-5d00-4371-827d-17b8856bf22c tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-225937', 'volume_id': 'fcaa69f8-c32a-43e4-8f84-b58e01f1b245', 'name': 'volume-fcaa69f8-c32a-43e4-8f84-b58e01f1b245', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '3b0d115d-dad5-4881-a0e0-b98f555da533', 'attached_at': '', 'detached_at': '', 'volume_id': 'fcaa69f8-c32a-43e4-8f84-b58e01f1b245', 'serial': 'fcaa69f8-c32a-43e4-8f84-b58e01f1b245'}, 'delete_on_termination': True, 'mount_device': '/dev/sda', 'device_type': None, 'boot_index': 0, 'disk_bus': None, 'guest_format': None, 'attachment_id': '0e903d04-6f39-4491-b35f-9bcf32362a62', 'volume_type': None}], 'swap': None} {{(pid=63345) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 733.148893] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-d0b1cd44-5d00-4371-827d-17b8856bf22c tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Root volume attach. Driver type: vmdk {{(pid=63345) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 733.152306] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3965446-e380-427b-ae8e-7cef82f797ef {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.157132] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee22dd18-29c5-4860-bd88-f54307b6d855 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.165594] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-062026fd-6959-473d-a868-a379cc37537b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.171293] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-4198dce3-861f-4f7a-a6b5-7000aa05b2f3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.180733] env[63345]: DEBUG oslo_vmware.api [None req-d0b1cd44-5d00-4371-827d-17b8856bf22c tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] Waiting for the task: (returnval){ [ 733.180733] env[63345]: value = "task-1016913" [ 733.180733] env[63345]: _type = "Task" [ 733.180733] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.187392] env[63345]: DEBUG oslo_vmware.api [None req-d0b1cd44-5d00-4371-827d-17b8856bf22c tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] Task: {'id': task-1016913, 'name': RelocateVM_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.318468] env[63345]: DEBUG oslo_concurrency.lockutils [req-0adf2eb8-c9a8-4e7d-bf58-1cffb1c41599 req-9ae56c45-b3ca-42b5-ae4f-c5691a6631a7 service nova] Releasing lock "refresh_cache-3b0d115d-dad5-4881-a0e0-b98f555da533" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 733.396531] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b493d0dc-0154-4871-ab00-8b2483739945 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.405170] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6af5129a-1ff5-4a75-b320-dc06e0cc2696 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.441038] env[63345]: DEBUG oslo_concurrency.lockutils [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Acquiring lock "refresh_cache-4a59b565-571f-48ef-97bd-bed9853e2d8e" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 733.441038] env[63345]: DEBUG oslo_concurrency.lockutils [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Acquired lock "refresh_cache-4a59b565-571f-48ef-97bd-bed9853e2d8e" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 733.441038] env[63345]: DEBUG nova.network.neutron [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: 4a59b565-571f-48ef-97bd-bed9853e2d8e] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 733.443054] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbd5adce-44db-41ee-a100-e4c88bb835bd {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.451458] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f15762b2-659c-4aeb-add8-4b7c8bb1eb0a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.457657] env[63345]: DEBUG nova.compute.manager [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: b5173471-3367-42ba-b450-62ad8573f048] Starting instance... 
{{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 733.473435] env[63345]: DEBUG nova.compute.provider_tree [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 733.504646] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 733.504646] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 733.574270] env[63345]: DEBUG oslo_vmware.api [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Task: {'id': task-1016912, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.693424] env[63345]: DEBUG oslo_vmware.api [None req-d0b1cd44-5d00-4371-827d-17b8856bf22c tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] Task: {'id': task-1016913, 'name': RelocateVM_Task} progress is 20%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.976418] env[63345]: DEBUG nova.scheduler.client.report [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 733.993623] env[63345]: DEBUG nova.network.neutron [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: 4a59b565-571f-48ef-97bd-bed9853e2d8e] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 734.002400] env[63345]: DEBUG oslo_concurrency.lockutils [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 734.013073] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 734.013414] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Starting heal instance info cache {{(pid=63345) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10257}} [ 734.013811] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Rebuilding the list of instances to heal {{(pid=63345) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10261}} [ 734.079312] env[63345]: DEBUG oslo_vmware.api [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Task: {'id': task-1016912, 'name': ReconfigVM_Task, 'duration_secs': 0.634396} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.079682] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] [instance: 64fcf837-1d9d-41b1-a2a1-3c16362932cf] Reconfigured VM instance instance-00000030 to attach disk [datastore2] 64fcf837-1d9d-41b1-a2a1-3c16362932cf/64fcf837-1d9d-41b1-a2a1-3c16362932cf.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 734.080372] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d372ffbd-5ec0-4c6f-bfbb-1c4b6412a2c4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.088870] env[63345]: DEBUG oslo_vmware.api [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Waiting for the task: (returnval){ [ 734.088870] env[63345]: value = "task-1016914" [ 734.088870] env[63345]: _type = "Task" [ 734.088870] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.095795] env[63345]: DEBUG oslo_vmware.api [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Task: {'id': task-1016914, 'name': Rename_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.196081] env[63345]: DEBUG oslo_vmware.api [None req-d0b1cd44-5d00-4371-827d-17b8856bf22c tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] Task: {'id': task-1016913, 'name': RelocateVM_Task, 'duration_secs': 0.712412} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.196559] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-d0b1cd44-5d00-4371-827d-17b8856bf22c tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Volume attach. Driver type: vmdk {{(pid=63345) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 734.196794] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-d0b1cd44-5d00-4371-827d-17b8856bf22c tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-225937', 'volume_id': 'fcaa69f8-c32a-43e4-8f84-b58e01f1b245', 'name': 'volume-fcaa69f8-c32a-43e4-8f84-b58e01f1b245', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '3b0d115d-dad5-4881-a0e0-b98f555da533', 'attached_at': '', 'detached_at': '', 'volume_id': 'fcaa69f8-c32a-43e4-8f84-b58e01f1b245', 'serial': 'fcaa69f8-c32a-43e4-8f84-b58e01f1b245'} {{(pid=63345) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 734.199949] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8e679ea-34fc-41ae-a831-da002d811124 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.220978] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-816d25bb-46c3-45f5-8bfe-29c457c62ccd {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.245255] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-d0b1cd44-5d00-4371-827d-17b8856bf22c tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Reconfiguring VM instance instance-00000031 to attach disk [datastore2] volume-fcaa69f8-c32a-43e4-8f84-b58e01f1b245/volume-fcaa69f8-c32a-43e4-8f84-b58e01f1b245.vmdk or device None with type thin {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 734.247277] env[63345]: DEBUG nova.network.neutron [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: 4a59b565-571f-48ef-97bd-bed9853e2d8e] Updating instance_info_cache with network_info: [{"id": "4ea770ff-4619-4df2-b09f-53b1fdc250e5", "address": "fa:16:3e:1e:ce:1c", "network": {"id": "441f27c7-de99-494b-9db5-8e67e3c8e7b6", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-592603355-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, 
"meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8f0343855b6147f38b0cb3f2c72330e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ea770ff-46", "ovs_interfaceid": "4ea770ff-4619-4df2-b09f-53b1fdc250e5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 734.247849] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-11fee538-09fa-4d87-afcc-390beddb4477 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.264874] env[63345]: DEBUG oslo_concurrency.lockutils [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Releasing lock "refresh_cache-4a59b565-571f-48ef-97bd-bed9853e2d8e" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 734.265239] env[63345]: DEBUG nova.compute.manager [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: 4a59b565-571f-48ef-97bd-bed9853e2d8e] Instance network_info: |[{"id": "4ea770ff-4619-4df2-b09f-53b1fdc250e5", "address": "fa:16:3e:1e:ce:1c", "network": {"id": "441f27c7-de99-494b-9db5-8e67e3c8e7b6", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-592603355-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8f0343855b6147f38b0cb3f2c72330e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ea770ff-46", "ovs_interfaceid": "4ea770ff-4619-4df2-b09f-53b1fdc250e5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 734.265918] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: 4a59b565-571f-48ef-97bd-bed9853e2d8e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1e:ce:1c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd5970ab5-34b8-4065-bfa6-f568b8f103b7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'4ea770ff-4619-4df2-b09f-53b1fdc250e5', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 734.275206] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Creating folder: Project (8f0343855b6147f38b0cb3f2c72330e0). Parent ref: group-v225918. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 734.276494] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-352ae667-c853-4b80-b08d-ec577cd13078 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.282075] env[63345]: DEBUG oslo_vmware.api [None req-d0b1cd44-5d00-4371-827d-17b8856bf22c tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] Waiting for the task: (returnval){ [ 734.282075] env[63345]: value = "task-1016915" [ 734.282075] env[63345]: _type = "Task" [ 734.282075] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.288633] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Created folder: Project (8f0343855b6147f38b0cb3f2c72330e0) in parent group-v225918. [ 734.288939] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Creating folder: Instances. Parent ref: group-v226001. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 734.289850] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-274d16fe-60ab-4c20-8dbf-4bdb9dfcc054 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.299523] env[63345]: DEBUG oslo_vmware.api [None req-d0b1cd44-5d00-4371-827d-17b8856bf22c tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] Task: {'id': task-1016915, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.307683] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Created folder: Instances in parent group-v226001. [ 734.308370] env[63345]: DEBUG oslo.service.loopingcall [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 734.308525] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4a59b565-571f-48ef-97bd-bed9853e2d8e] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 734.308819] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dfcb8738-89a3-449e-993d-7a3e7822f4e7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.342848] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 734.342848] env[63345]: value = "task-1016918" [ 734.342848] env[63345]: _type = "Task" [ 734.342848] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.352517] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016918, 'name': CreateVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.488438] env[63345]: DEBUG oslo_concurrency.lockutils [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.660s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 734.490860] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.606s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 734.492431] env[63345]: INFO nova.compute.claims [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 6cbe136b-5bf6-4f17-bcef-b712d850615f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 734.530213] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 64fcf837-1d9d-41b1-a2a1-3c16362932cf] Skipping network cache update for instance because it is Building. {{(pid=63345) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10270}} [ 734.530521] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Skipping network cache update for instance because it is Building. {{(pid=63345) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10270}} [ 734.530730] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 4a59b565-571f-48ef-97bd-bed9853e2d8e] Skipping network cache update for instance because it is Building. {{(pid=63345) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10270}} [ 734.531294] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: d3e99100-f13f-4019-9b5a-adaa65dacc5f] Skipping network cache update for instance because it is Building. 
{{(pid=63345) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10270}} [ 734.550659] env[63345]: DEBUG nova.compute.manager [req-d41e5491-2f18-4375-a33d-3cbc059a388a req-38d9fdba-a085-453f-b925-74e9712b8f49 service nova] [instance: 4a59b565-571f-48ef-97bd-bed9853e2d8e] Received event network-vif-plugged-4ea770ff-4619-4df2-b09f-53b1fdc250e5 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 734.550885] env[63345]: DEBUG oslo_concurrency.lockutils [req-d41e5491-2f18-4375-a33d-3cbc059a388a req-38d9fdba-a085-453f-b925-74e9712b8f49 service nova] Acquiring lock "4a59b565-571f-48ef-97bd-bed9853e2d8e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 734.551222] env[63345]: DEBUG oslo_concurrency.lockutils [req-d41e5491-2f18-4375-a33d-3cbc059a388a req-38d9fdba-a085-453f-b925-74e9712b8f49 service nova] Lock "4a59b565-571f-48ef-97bd-bed9853e2d8e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 734.551496] env[63345]: DEBUG oslo_concurrency.lockutils [req-d41e5491-2f18-4375-a33d-3cbc059a388a req-38d9fdba-a085-453f-b925-74e9712b8f49 service nova] Lock "4a59b565-571f-48ef-97bd-bed9853e2d8e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 734.551798] env[63345]: DEBUG nova.compute.manager [req-d41e5491-2f18-4375-a33d-3cbc059a388a req-38d9fdba-a085-453f-b925-74e9712b8f49 service nova] [instance: 4a59b565-571f-48ef-97bd-bed9853e2d8e] No waiting events found dispatching network-vif-plugged-4ea770ff-4619-4df2-b09f-53b1fdc250e5 {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 734.552020] env[63345]: WARNING nova.compute.manager [req-d41e5491-2f18-4375-a33d-3cbc059a388a req-38d9fdba-a085-453f-b925-74e9712b8f49 service nova] [instance: 4a59b565-571f-48ef-97bd-bed9853e2d8e] Received unexpected event network-vif-plugged-4ea770ff-4619-4df2-b09f-53b1fdc250e5 for instance with vm_state building and task_state spawning. [ 734.552275] env[63345]: DEBUG nova.compute.manager [req-d41e5491-2f18-4375-a33d-3cbc059a388a req-38d9fdba-a085-453f-b925-74e9712b8f49 service nova] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] Received event network-changed-989bf403-079f-46b9-ab79-c645cec393aa {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 734.552526] env[63345]: DEBUG nova.compute.manager [req-d41e5491-2f18-4375-a33d-3cbc059a388a req-38d9fdba-a085-453f-b925-74e9712b8f49 service nova] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] Refreshing instance network info cache due to event network-changed-989bf403-079f-46b9-ab79-c645cec393aa. 
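The "<uuid>-events" lock acquire/release around pop_instance_event above, followed by "No waiting events found" and the "Received unexpected event" warning, reflects the external-event dispatch pattern: a waiter registers interest in an event such as network-vif-plugged-<port>, and the handler pops and signals it under a per-instance lock. A rough sketch with plain threading primitives, not Nova's actual InstanceEvents class.

import threading

class InstanceEvents:
    """Rough sketch of per-instance event dispatch guarded by a named lock."""

    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = {}  # (instance_uuid, event_name) -> threading.Event

    def prepare_for_event(self, instance_uuid, event_name):
        with self._lock:
            ev = threading.Event()
            self._waiters[(instance_uuid, event_name)] = ev
            return ev

    def pop_instance_event(self, instance_uuid, event_name):
        with self._lock:  # the "<uuid>-events" lock in the log
            return self._waiters.pop((instance_uuid, event_name), None)

    def dispatch(self, instance_uuid, event_name):
        ev = self.pop_instance_event(instance_uuid, event_name)
        if ev is None:
            # Corresponds to "No waiting events found dispatching ..." and the
            # "Received unexpected event ..." warning in the log.
            return False
        ev.set()
        return True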
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 734.552800] env[63345]: DEBUG oslo_concurrency.lockutils [req-d41e5491-2f18-4375-a33d-3cbc059a388a req-38d9fdba-a085-453f-b925-74e9712b8f49 service nova] Acquiring lock "refresh_cache-85fb1ecd-4ca3-401d-a87a-131f0b275506" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 734.553037] env[63345]: DEBUG oslo_concurrency.lockutils [req-d41e5491-2f18-4375-a33d-3cbc059a388a req-38d9fdba-a085-453f-b925-74e9712b8f49 service nova] Acquired lock "refresh_cache-85fb1ecd-4ca3-401d-a87a-131f0b275506" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 734.553268] env[63345]: DEBUG nova.network.neutron [req-d41e5491-2f18-4375-a33d-3cbc059a388a req-38d9fdba-a085-453f-b925-74e9712b8f49 service nova] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] Refreshing network info cache for port 989bf403-079f-46b9-ab79-c645cec393aa {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 734.573776] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Acquiring lock "refresh_cache-bc9d2e6a-f77a-4a21-90bc-81949cbfce91" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 734.574041] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Acquired lock "refresh_cache-bc9d2e6a-f77a-4a21-90bc-81949cbfce91" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 734.574366] env[63345]: DEBUG nova.network.neutron [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: bc9d2e6a-f77a-4a21-90bc-81949cbfce91] Forcefully refreshing network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2004}} [ 734.574611] env[63345]: DEBUG nova.objects.instance [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Lazy-loading 'info_cache' on Instance uuid bc9d2e6a-f77a-4a21-90bc-81949cbfce91 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 734.599644] env[63345]: DEBUG oslo_vmware.api [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Task: {'id': task-1016914, 'name': Rename_Task, 'duration_secs': 0.258058} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.599950] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] [instance: 64fcf837-1d9d-41b1-a2a1-3c16362932cf] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 734.600195] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2c4c1265-01a6-4815-aeb0-bfe7883f6fb2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.606726] env[63345]: DEBUG oslo_vmware.api [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Waiting for the task: (returnval){ [ 734.606726] env[63345]: value = "task-1016919" [ 734.606726] env[63345]: _type = "Task" [ 734.606726] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.615615] env[63345]: DEBUG oslo_vmware.api [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Task: {'id': task-1016919, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.797683] env[63345]: DEBUG oslo_vmware.api [None req-d0b1cd44-5d00-4371-827d-17b8856bf22c tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] Task: {'id': task-1016915, 'name': ReconfigVM_Task, 'duration_secs': 0.323109} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.798149] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-d0b1cd44-5d00-4371-827d-17b8856bf22c tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Reconfigured VM instance instance-00000031 to attach disk [datastore2] volume-fcaa69f8-c32a-43e4-8f84-b58e01f1b245/volume-fcaa69f8-c32a-43e4-8f84-b58e01f1b245.vmdk or device None with type thin {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 734.806715] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-07b961e0-6e74-4d83-8ab6-be389cd1a346 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.822709] env[63345]: DEBUG oslo_vmware.api [None req-d0b1cd44-5d00-4371-827d-17b8856bf22c tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] Waiting for the task: (returnval){ [ 734.822709] env[63345]: value = "task-1016920" [ 734.822709] env[63345]: _type = "Task" [ 734.822709] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.830928] env[63345]: DEBUG oslo_vmware.api [None req-d0b1cd44-5d00-4371-827d-17b8856bf22c tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] Task: {'id': task-1016920, 'name': ReconfigVM_Task} progress is 5%. 
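_attach_volume_vmdk above receives a connection_info dict and the follow-up ReconfigVM_Task attaches "[datastore2] volume-<id>/volume-<id>.vmdk". A small sketch, assuming the dict shape shown in the log, that pulls out the volume name and builds that datastore path string; the path format is taken from the attach entries above, not from the driver code.

def vmdk_path_from_connection_info(connection_info, datastore="datastore2"):
    """Build the '[datastore] volume-<id>/volume-<id>.vmdk' path string."""
    data = connection_info["data"]
    name = data["name"]  # e.g. 'volume-fcaa69f8-c32a-43e4-8f84-b58e01f1b245'
    return f"[{datastore}] {name}/{name}.vmdk"

conn = {
    "driver_volume_type": "vmdk",
    "data": {
        "volume": "vm-225937",
        "volume_id": "fcaa69f8-c32a-43e4-8f84-b58e01f1b245",
        "name": "volume-fcaa69f8-c32a-43e4-8f84-b58e01f1b245",
        "access_mode": "rw",
    },
}
print(vmdk_path_from_connection_info(conn))
# [datastore2] volume-fcaa69f8-c32a-43e4-8f84-b58e01f1b245/volume-fcaa69f8-c32a-43e4-8f84-b58e01f1b245.vmdk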
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.849521] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016918, 'name': CreateVM_Task, 'duration_secs': 0.43688} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.849685] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4a59b565-571f-48ef-97bd-bed9853e2d8e] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 734.850666] env[63345]: DEBUG oslo_concurrency.lockutils [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 734.851091] env[63345]: DEBUG oslo_concurrency.lockutils [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 734.851294] env[63345]: DEBUG oslo_concurrency.lockutils [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 734.851449] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e564e313-8c2c-44c5-8979-3e7b438fee07 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.856225] env[63345]: DEBUG oslo_vmware.api [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Waiting for the task: (returnval){ [ 734.856225] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52c5979a-c531-24ed-6c07-3bfd1d14453a" [ 734.856225] env[63345]: _type = "Task" [ 734.856225] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.866627] env[63345]: DEBUG oslo_vmware.api [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52c5979a-c531-24ed-6c07-3bfd1d14453a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.001716] env[63345]: DEBUG oslo_concurrency.lockutils [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Acquiring lock "5ca2c330-0dff-40b2-8ed4-9f2962437786" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 735.001901] env[63345]: DEBUG oslo_concurrency.lockutils [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Lock "5ca2c330-0dff-40b2-8ed4-9f2962437786" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 735.116708] env[63345]: DEBUG oslo_vmware.api [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Task: {'id': task-1016919, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.339121] env[63345]: DEBUG oslo_vmware.api [None req-d0b1cd44-5d00-4371-827d-17b8856bf22c tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] Task: {'id': task-1016920, 'name': ReconfigVM_Task, 'duration_secs': 0.252826} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.339853] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-d0b1cd44-5d00-4371-827d-17b8856bf22c tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-225937', 'volume_id': 'fcaa69f8-c32a-43e4-8f84-b58e01f1b245', 'name': 'volume-fcaa69f8-c32a-43e4-8f84-b58e01f1b245', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '3b0d115d-dad5-4881-a0e0-b98f555da533', 'attached_at': '', 'detached_at': '', 'volume_id': 'fcaa69f8-c32a-43e4-8f84-b58e01f1b245', 'serial': 'fcaa69f8-c32a-43e4-8f84-b58e01f1b245'} {{(pid=63345) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 735.340510] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2e0d66f7-5879-4a87-b7bb-c6da50b644fd {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.347686] env[63345]: DEBUG oslo_vmware.api [None req-d0b1cd44-5d00-4371-827d-17b8856bf22c tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] Waiting for the task: (returnval){ [ 735.347686] env[63345]: value = "task-1016921" [ 735.347686] env[63345]: _type = "Task" [ 735.347686] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.359976] env[63345]: DEBUG oslo_vmware.api [None req-d0b1cd44-5d00-4371-827d-17b8856bf22c tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] Task: {'id': task-1016921, 'name': Rename_Task} progress is 6%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.368609] env[63345]: DEBUG oslo_vmware.api [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52c5979a-c531-24ed-6c07-3bfd1d14453a, 'name': SearchDatastore_Task, 'duration_secs': 0.012395} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.368902] env[63345]: DEBUG oslo_concurrency.lockutils [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 735.369151] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: 4a59b565-571f-48ef-97bd-bed9853e2d8e] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 735.369382] env[63345]: DEBUG oslo_concurrency.lockutils [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 735.369516] env[63345]: DEBUG oslo_concurrency.lockutils [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 735.369690] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 735.372372] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-75fe97bb-207e-4bb3-a973-a57f7baa6567 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.381986] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 735.381986] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 735.382726] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1f2bfde1-0444-4d91-bdf0-23465ac13dff {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.389055] env[63345]: DEBUG oslo_vmware.api [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Waiting for the task: (returnval){ [ 735.389055] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]524026eb-4f40-e82b-d52b-f711b956ec9c" [ 735.389055] env[63345]: _type = "Task" [ 735.389055] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.396861] env[63345]: DEBUG oslo_vmware.api [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]524026eb-4f40-e82b-d52b-f711b956ec9c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.506667] env[63345]: DEBUG oslo_concurrency.lockutils [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Lock "5ca2c330-0dff-40b2-8ed4-9f2962437786" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: held 0.505s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 735.507872] env[63345]: DEBUG nova.compute.manager [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] [instance: d3e99100-f13f-4019-9b5a-adaa65dacc5f] Start building networks asynchronously for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 735.560627] env[63345]: DEBUG nova.network.neutron [req-d41e5491-2f18-4375-a33d-3cbc059a388a req-38d9fdba-a085-453f-b925-74e9712b8f49 service nova] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] Updated VIF entry in instance network info cache for port 989bf403-079f-46b9-ab79-c645cec393aa. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 735.561014] env[63345]: DEBUG nova.network.neutron [req-d41e5491-2f18-4375-a33d-3cbc059a388a req-38d9fdba-a085-453f-b925-74e9712b8f49 service nova] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] Updating instance_info_cache with network_info: [{"id": "989bf403-079f-46b9-ab79-c645cec393aa", "address": "fa:16:3e:93:20:cd", "network": {"id": "b360ab0d-3deb-4632-a8d5-c1639db9e9e2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2015660260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.217", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33c28bfca4da460e8ca96dc7519204c8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f35e69ef-c2c8-4b8c-9887-33e97b242c0a", "external-id": "nsx-vlan-transportzone-969", "segmentation_id": 969, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap989bf403-07", "ovs_interfaceid": "989bf403-079f-46b9-ab79-c645cec393aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 735.624753] env[63345]: DEBUG oslo_vmware.api [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Task: {'id': task-1016919, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.858690] env[63345]: DEBUG oslo_vmware.api [None req-d0b1cd44-5d00-4371-827d-17b8856bf22c tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] Task: {'id': task-1016921, 'name': Rename_Task, 'duration_secs': 0.171171} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.861507] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0b1cd44-5d00-4371-827d-17b8856bf22c tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 735.862054] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c6fa4b94-aeed-4b7c-8ad8-b49be19102bf {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.868847] env[63345]: DEBUG oslo_vmware.api [None req-d0b1cd44-5d00-4371-827d-17b8856bf22c tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] Waiting for the task: (returnval){ [ 735.868847] env[63345]: value = "task-1016922" [ 735.868847] env[63345]: _type = "Task" [ 735.868847] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.879398] env[63345]: DEBUG oslo_vmware.api [None req-d0b1cd44-5d00-4371-827d-17b8856bf22c tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] Task: {'id': task-1016922, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.898087] env[63345]: DEBUG oslo_vmware.api [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]524026eb-4f40-e82b-d52b-f711b956ec9c, 'name': SearchDatastore_Task, 'duration_secs': 0.009163} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.902380] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4bed8843-cf9f-4a79-a4cc-d7360d9458a8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.906597] env[63345]: DEBUG oslo_vmware.api [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Waiting for the task: (returnval){ [ 735.906597] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]521f1eaa-f985-20f6-059d-95d401eeffc2" [ 735.906597] env[63345]: _type = "Task" [ 735.906597] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.915268] env[63345]: DEBUG oslo_vmware.api [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]521f1eaa-f985-20f6-059d-95d401eeffc2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.964696] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4effab54-00c3-4146-8811-f4b10f83cae3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.972950] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f05dc52b-995e-44b4-9028-a1e498e17904 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.004484] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eadfd31-4b60-41c0-a2e2-6b6f28ef4abe {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.011941] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-866dfd55-90fd-4b6f-97c0-fadc239b4b34 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.018127] env[63345]: DEBUG nova.compute.utils [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 736.019635] env[63345]: DEBUG nova.compute.manager [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] [instance: d3e99100-f13f-4019-9b5a-adaa65dacc5f] Allocating IP information in the background. {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 736.019823] env[63345]: DEBUG nova.network.neutron [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] [instance: d3e99100-f13f-4019-9b5a-adaa65dacc5f] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 736.031789] env[63345]: DEBUG nova.compute.provider_tree [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 736.067859] env[63345]: DEBUG oslo_concurrency.lockutils [req-d41e5491-2f18-4375-a33d-3cbc059a388a req-38d9fdba-a085-453f-b925-74e9712b8f49 service nova] Releasing lock "refresh_cache-85fb1ecd-4ca3-401d-a87a-131f0b275506" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 736.068161] env[63345]: DEBUG nova.compute.manager [req-d41e5491-2f18-4375-a33d-3cbc059a388a req-38d9fdba-a085-453f-b925-74e9712b8f49 service nova] [instance: 4a59b565-571f-48ef-97bd-bed9853e2d8e] Received event network-changed-4ea770ff-4619-4df2-b09f-53b1fdc250e5 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 736.068411] env[63345]: DEBUG nova.compute.manager [req-d41e5491-2f18-4375-a33d-3cbc059a388a req-38d9fdba-a085-453f-b925-74e9712b8f49 service nova] [instance: 4a59b565-571f-48ef-97bd-bed9853e2d8e] Refreshing instance network info cache due to event 
network-changed-4ea770ff-4619-4df2-b09f-53b1fdc250e5. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 736.068637] env[63345]: DEBUG oslo_concurrency.lockutils [req-d41e5491-2f18-4375-a33d-3cbc059a388a req-38d9fdba-a085-453f-b925-74e9712b8f49 service nova] Acquiring lock "refresh_cache-4a59b565-571f-48ef-97bd-bed9853e2d8e" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 736.068809] env[63345]: DEBUG oslo_concurrency.lockutils [req-d41e5491-2f18-4375-a33d-3cbc059a388a req-38d9fdba-a085-453f-b925-74e9712b8f49 service nova] Acquired lock "refresh_cache-4a59b565-571f-48ef-97bd-bed9853e2d8e" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 736.069014] env[63345]: DEBUG nova.network.neutron [req-d41e5491-2f18-4375-a33d-3cbc059a388a req-38d9fdba-a085-453f-b925-74e9712b8f49 service nova] [instance: 4a59b565-571f-48ef-97bd-bed9853e2d8e] Refreshing network info cache for port 4ea770ff-4619-4df2-b09f-53b1fdc250e5 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 736.116966] env[63345]: DEBUG nova.policy [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6b1f43cd2c214d59a42426d0f96d42f4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '72dcecd3bd674fa2bab4034f63305497', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 736.120830] env[63345]: DEBUG oslo_vmware.api [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Task: {'id': task-1016919, 'name': PowerOnVM_Task, 'duration_secs': 1.049825} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 736.121095] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] [instance: 64fcf837-1d9d-41b1-a2a1-3c16362932cf] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 736.121303] env[63345]: INFO nova.compute.manager [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] [instance: 64fcf837-1d9d-41b1-a2a1-3c16362932cf] Took 9.48 seconds to spawn the instance on the hypervisor. 
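[editor's note] The "Acquiring lock" / "Acquired lock" / "Releasing lock" entries above (frames lock at lockutils.py:310/313/331) and the "acquired by ... :: waited" / ""released" by ... :: held" entries (frame inner at lockutils.py:402/407/421) are emitted by oslo.concurrency's two locking entry points. A minimal sketch of the calling patterns that produce these messages is below; the lock names and bodies are placeholders for illustration, not Nova's actual code.

from oslo_concurrency import lockutils

# Context-manager form: logs "Acquiring lock ..." / "Acquired lock ..." on
# entry and "Releasing lock ..." on exit, matching the refresh_cache lines above.
with lockutils.lock('refresh_cache-<instance-uuid>'):
    pass  # e.g. refresh the instance network info cache while holding the lock

# Decorator form: the "acquired by"/"released by" messages name the wrapped
# callable, as in the _do_validation and instance_claim entries in this log.
@lockutils.synchronized('compute_resources')
def update_usage():
    pass  # placeholder critical section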
[ 736.121487] env[63345]: DEBUG nova.compute.manager [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] [instance: 64fcf837-1d9d-41b1-a2a1-3c16362932cf] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 736.122300] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc352d74-21d6-4722-bde2-61fad2267e41 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.291467] env[63345]: DEBUG nova.compute.manager [None req-9ff9e066-5206-4b32-aec7-946a708f647c tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 3101726f-5b14-417e-bcf8-390ce1f9b467] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 736.297356] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1eb04326-07ea-4a8d-8360-c89037d88345 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.381955] env[63345]: DEBUG oslo_vmware.api [None req-d0b1cd44-5d00-4371-827d-17b8856bf22c tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] Task: {'id': task-1016922, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.418790] env[63345]: DEBUG oslo_vmware.api [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]521f1eaa-f985-20f6-059d-95d401eeffc2, 'name': SearchDatastore_Task, 'duration_secs': 0.009388} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 736.419056] env[63345]: DEBUG oslo_concurrency.lockutils [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 736.419314] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 4a59b565-571f-48ef-97bd-bed9853e2d8e/4a59b565-571f-48ef-97bd-bed9853e2d8e.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 736.419564] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fda18c45-bd62-4e63-879f-7c4162e25cd8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.428524] env[63345]: DEBUG oslo_vmware.api [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Waiting for the task: (returnval){ [ 736.428524] env[63345]: value = "task-1016923" [ 736.428524] env[63345]: _type = "Task" [ 736.428524] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 736.437652] env[63345]: DEBUG oslo_vmware.api [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Task: {'id': task-1016923, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.494020] env[63345]: DEBUG nova.network.neutron [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: bc9d2e6a-f77a-4a21-90bc-81949cbfce91] Updating instance_info_cache with network_info: [{"id": "35f00929-4dc1-4515-b0de-19a6377c68ca", "address": "fa:16:3e:9e:c6:f2", "network": {"id": "5159b9e8-dfb2-472c-bec6-f963867f9baf", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-134143484-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.243", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be4b8982dd144c969cb530f52ed9297b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31ac3fea-ebf4-4bed-bf70-1eaecdf71280", "external-id": "nsx-vlan-transportzone-489", "segmentation_id": 489, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap35f00929-4d", "ovs_interfaceid": "35f00929-4dc1-4515-b0de-19a6377c68ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 736.523530] env[63345]: DEBUG nova.compute.manager [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] [instance: d3e99100-f13f-4019-9b5a-adaa65dacc5f] Start building block device mappings for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 736.536061] env[63345]: DEBUG nova.scheduler.client.report [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 736.642790] env[63345]: INFO nova.compute.manager [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] [instance: 64fcf837-1d9d-41b1-a2a1-3c16362932cf] Took 36.09 seconds to build instance. 
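[editor's note] The SearchDatastore_Task, CopyVirtualDisk_Task and PowerOnVM_Task entries in this stretch all follow oslo.vmware's invoke-then-poll shape: a *_Task SOAP call returns a Task moref (the session[...]/task-10169xx values), and wait_for_task() then polls its TaskInfo, producing the "Waiting for the task ... to complete", "progress is N%" and "completed successfully" lines. A rough sketch of that library pattern is below, assuming an already-constructed oslo_vmware.api.VMwareAPISession and a placeholder VM moref; this is not Nova's vm_util code, parameter details may differ by version.

from oslo_vmware import vim_util

def power_on_and_wait(session, vm_moref_value='vm-12345'):
    # Placeholder moref; real code resolves this via a PropertyCollector query.
    vm_ref = vim_util.get_moref(vm_moref_value, 'VirtualMachine')
    # invoke_api() issues the SOAP request and returns the Task reference
    # that appears as "task-10169xx" in the log entries above.
    task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    # wait_for_task() polls TaskInfo until the task reaches success or error,
    # emitting the "progress is N%" debug lines recorded in this log.
    return session.wait_for_task(task_ref)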
[ 736.693731] env[63345]: DEBUG nova.network.neutron [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] [instance: d3e99100-f13f-4019-9b5a-adaa65dacc5f] Successfully created port: 87ece051-f9a8-483b-bc74-1e13e76bdd75 {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 736.809434] env[63345]: INFO nova.compute.manager [None req-9ff9e066-5206-4b32-aec7-946a708f647c tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 3101726f-5b14-417e-bcf8-390ce1f9b467] instance snapshotting [ 736.812895] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5906783-1cbb-43ca-8210-26321be13da9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.844334] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67429e19-1741-450d-9eaa-92dc1a913083 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.880941] env[63345]: DEBUG oslo_vmware.api [None req-d0b1cd44-5d00-4371-827d-17b8856bf22c tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] Task: {'id': task-1016922, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.939261] env[63345]: DEBUG oslo_vmware.api [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Task: {'id': task-1016923, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.976619] env[63345]: DEBUG nova.network.neutron [req-d41e5491-2f18-4375-a33d-3cbc059a388a req-38d9fdba-a085-453f-b925-74e9712b8f49 service nova] [instance: 4a59b565-571f-48ef-97bd-bed9853e2d8e] Updated VIF entry in instance network info cache for port 4ea770ff-4619-4df2-b09f-53b1fdc250e5. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 736.977129] env[63345]: DEBUG nova.network.neutron [req-d41e5491-2f18-4375-a33d-3cbc059a388a req-38d9fdba-a085-453f-b925-74e9712b8f49 service nova] [instance: 4a59b565-571f-48ef-97bd-bed9853e2d8e] Updating instance_info_cache with network_info: [{"id": "4ea770ff-4619-4df2-b09f-53b1fdc250e5", "address": "fa:16:3e:1e:ce:1c", "network": {"id": "441f27c7-de99-494b-9db5-8e67e3c8e7b6", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-592603355-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8f0343855b6147f38b0cb3f2c72330e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ea770ff-46", "ovs_interfaceid": "4ea770ff-4619-4df2-b09f-53b1fdc250e5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 736.997678] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Releasing lock "refresh_cache-bc9d2e6a-f77a-4a21-90bc-81949cbfce91" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 736.997901] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: bc9d2e6a-f77a-4a21-90bc-81949cbfce91] Updated the network info_cache for instance {{(pid=63345) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10328}} [ 736.999035] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 736.999035] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 736.999035] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 736.999221] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 736.999380] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._poll_volume_usage 
{{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 736.999947] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 736.999947] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63345) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10876}} [ 736.999947] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager.update_available_resource {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 737.040915] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.551s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 737.041525] env[63345]: DEBUG nova.compute.manager [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 6cbe136b-5bf6-4f17-bcef-b712d850615f] Start building networks asynchronously for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 737.044046] env[63345]: DEBUG oslo_concurrency.lockutils [None req-2672da66-43f2-4cdb-9d1e-327963356641 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.470s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 737.044279] env[63345]: DEBUG nova.objects.instance [None req-2672da66-43f2-4cdb-9d1e-327963356641 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Lazy-loading 'resources' on Instance uuid 30755716-03a7-41bd-90c2-7ef21baf9975 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 737.144935] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4bcbe1f5-02dc-4946-a315-7e1e66d5fa16 tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Lock "64fcf837-1d9d-41b1-a2a1-3c16362932cf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 116.767s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 737.355758] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-9ff9e066-5206-4b32-aec7-946a708f647c tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 3101726f-5b14-417e-bcf8-390ce1f9b467] Creating Snapshot of the VM instance {{(pid=63345) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 737.356227] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with 
opID=oslo.vmware-a2d3f920-553f-4dc2-a5dc-3619a632e983 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.366111] env[63345]: DEBUG oslo_vmware.api [None req-9ff9e066-5206-4b32-aec7-946a708f647c tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Waiting for the task: (returnval){ [ 737.366111] env[63345]: value = "task-1016924" [ 737.366111] env[63345]: _type = "Task" [ 737.366111] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.376322] env[63345]: DEBUG oslo_vmware.api [None req-9ff9e066-5206-4b32-aec7-946a708f647c tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': task-1016924, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.382522] env[63345]: DEBUG oslo_vmware.api [None req-d0b1cd44-5d00-4371-827d-17b8856bf22c tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] Task: {'id': task-1016922, 'name': PowerOnVM_Task, 'duration_secs': 1.25904} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.383981] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0b1cd44-5d00-4371-827d-17b8856bf22c tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 737.383981] env[63345]: INFO nova.compute.manager [None req-d0b1cd44-5d00-4371-827d-17b8856bf22c tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Took 6.06 seconds to spawn the instance on the hypervisor. [ 737.383981] env[63345]: DEBUG nova.compute.manager [None req-d0b1cd44-5d00-4371-827d-17b8856bf22c tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 737.385453] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-917548f5-9c47-4984-9e0a-5f480114a789 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.441717] env[63345]: DEBUG oslo_vmware.api [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Task: {'id': task-1016923, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.545295} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.442113] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 4a59b565-571f-48ef-97bd-bed9853e2d8e/4a59b565-571f-48ef-97bd-bed9853e2d8e.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 737.442336] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: 4a59b565-571f-48ef-97bd-bed9853e2d8e] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 737.442623] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0cca91f0-2441-447f-8c10-1857d77a8507 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.451337] env[63345]: DEBUG oslo_vmware.api [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Waiting for the task: (returnval){ [ 737.451337] env[63345]: value = "task-1016925" [ 737.451337] env[63345]: _type = "Task" [ 737.451337] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.464637] env[63345]: DEBUG oslo_vmware.api [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Task: {'id': task-1016925, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.479829] env[63345]: DEBUG oslo_concurrency.lockutils [req-d41e5491-2f18-4375-a33d-3cbc059a388a req-38d9fdba-a085-453f-b925-74e9712b8f49 service nova] Releasing lock "refresh_cache-4a59b565-571f-48ef-97bd-bed9853e2d8e" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 737.503219] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 737.533683] env[63345]: DEBUG nova.compute.manager [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] [instance: d3e99100-f13f-4019-9b5a-adaa65dacc5f] Start spawning the instance on the hypervisor. 
{{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 737.548363] env[63345]: DEBUG nova.compute.utils [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 737.553525] env[63345]: DEBUG nova.compute.manager [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 6cbe136b-5bf6-4f17-bcef-b712d850615f] Allocating IP information in the background. {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 737.553525] env[63345]: DEBUG nova.network.neutron [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 6cbe136b-5bf6-4f17-bcef-b712d850615f] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 737.564973] env[63345]: DEBUG nova.virt.hardware [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 737.565276] env[63345]: DEBUG nova.virt.hardware [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 737.565486] env[63345]: DEBUG nova.virt.hardware [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 737.565700] env[63345]: DEBUG nova.virt.hardware [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 737.565911] env[63345]: DEBUG nova.virt.hardware [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 737.566140] env[63345]: DEBUG nova.virt.hardware [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Chose sockets=0, 
cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 737.566373] env[63345]: DEBUG nova.virt.hardware [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 737.566536] env[63345]: DEBUG nova.virt.hardware [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 737.566703] env[63345]: DEBUG nova.virt.hardware [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 737.566894] env[63345]: DEBUG nova.virt.hardware [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 737.567105] env[63345]: DEBUG nova.virt.hardware [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 737.567959] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f2f8d65-f9f6-4d5b-81f0-62622660cb3a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.579188] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e82376ee-a83a-422e-baf7-e1442df970e3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.604136] env[63345]: DEBUG nova.policy [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6055500166344214a404427722503338', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dfc1248fb5ee4f798b6c59154d4cf623', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 737.648524] env[63345]: DEBUG nova.compute.manager [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: bcec23fe-75c7-479e-9210-85ca6781d7e5] Starting instance... 
{{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 737.877104] env[63345]: DEBUG oslo_vmware.api [None req-9ff9e066-5206-4b32-aec7-946a708f647c tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': task-1016924, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.905152] env[63345]: INFO nova.compute.manager [None req-d0b1cd44-5d00-4371-827d-17b8856bf22c tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Took 32.69 seconds to build instance. [ 737.966277] env[63345]: DEBUG oslo_vmware.api [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Task: {'id': task-1016925, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068281} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.967164] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: 4a59b565-571f-48ef-97bd-bed9853e2d8e] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 737.967444] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7557b8a5-e0c3-47fc-a267-d1a0e93b4562 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.991452] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: 4a59b565-571f-48ef-97bd-bed9853e2d8e] Reconfiguring VM instance instance-00000032 to attach disk [datastore2] 4a59b565-571f-48ef-97bd-bed9853e2d8e/4a59b565-571f-48ef-97bd-bed9853e2d8e.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 737.994669] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ce5bc0ac-7459-483a-9288-4f632fd5c3a6 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.019021] env[63345]: DEBUG oslo_vmware.api [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Waiting for the task: (returnval){ [ 738.019021] env[63345]: value = "task-1016926" [ 738.019021] env[63345]: _type = "Task" [ 738.019021] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.031748] env[63345]: DEBUG oslo_vmware.api [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Task: {'id': task-1016926, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.054047] env[63345]: DEBUG nova.compute.manager [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 6cbe136b-5bf6-4f17-bcef-b712d850615f] Start building block device mappings for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 738.104615] env[63345]: DEBUG nova.network.neutron [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 6cbe136b-5bf6-4f17-bcef-b712d850615f] Successfully created port: be445772-8a21-4213-b9d3-8852ba3c12ef {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 738.117362] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3b70de3-3f98-4dfa-ae04-c20c552a1df7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.126021] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e1dae29-bd96-4651-99ba-8ad40e81a64d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.164434] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6af1e83c-a8aa-40b5-a233-a59704bcfea5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.176175] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cf2830e-81f1-4f8d-bfdc-3271fdd40d75 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.192342] env[63345]: DEBUG nova.compute.provider_tree [None req-2672da66-43f2-4cdb-9d1e-327963356641 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 738.196112] env[63345]: DEBUG oslo_concurrency.lockutils [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 738.383628] env[63345]: DEBUG oslo_vmware.api [None req-9ff9e066-5206-4b32-aec7-946a708f647c tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': task-1016924, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.406633] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d0b1cd44-5d00-4371-827d-17b8856bf22c tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] Lock "3b0d115d-dad5-4881-a0e0-b98f555da533" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 112.009s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 738.530266] env[63345]: DEBUG oslo_vmware.api [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Task: {'id': task-1016926, 'name': ReconfigVM_Task, 'duration_secs': 0.312828} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.530994] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: 4a59b565-571f-48ef-97bd-bed9853e2d8e] Reconfigured VM instance instance-00000032 to attach disk [datastore2] 4a59b565-571f-48ef-97bd-bed9853e2d8e/4a59b565-571f-48ef-97bd-bed9853e2d8e.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 738.533603] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-118c397f-6588-4b28-81b6-59760b3788bd {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.541324] env[63345]: DEBUG oslo_vmware.api [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Waiting for the task: (returnval){ [ 738.541324] env[63345]: value = "task-1016927" [ 738.541324] env[63345]: _type = "Task" [ 738.541324] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.553897] env[63345]: DEBUG oslo_vmware.api [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Task: {'id': task-1016927, 'name': Rename_Task} progress is 6%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.698272] env[63345]: DEBUG nova.scheduler.client.report [None req-2672da66-43f2-4cdb-9d1e-327963356641 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 738.880590] env[63345]: DEBUG oslo_vmware.api [None req-9ff9e066-5206-4b32-aec7-946a708f647c tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': task-1016924, 'name': CreateSnapshot_Task, 'duration_secs': 1.147925} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.881091] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-9ff9e066-5206-4b32-aec7-946a708f647c tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 3101726f-5b14-417e-bcf8-390ce1f9b467] Created Snapshot of the VM instance {{(pid=63345) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 738.882074] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3e738f0-eef0-4b71-af32-4af52a9e27c2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.911545] env[63345]: DEBUG nova.compute.manager [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] [instance: a9b69d13-6330-4f9b-b8e1-1c0017655f9f] Starting instance... 
{{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 738.972925] env[63345]: DEBUG nova.compute.manager [req-8e0723d9-f6b9-4acd-aacb-12c9f2e8781a req-251c6477-780f-41b2-8ae3-ea09ff22e765 service nova] [instance: d3e99100-f13f-4019-9b5a-adaa65dacc5f] Received event network-vif-plugged-87ece051-f9a8-483b-bc74-1e13e76bdd75 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 738.973168] env[63345]: DEBUG oslo_concurrency.lockutils [req-8e0723d9-f6b9-4acd-aacb-12c9f2e8781a req-251c6477-780f-41b2-8ae3-ea09ff22e765 service nova] Acquiring lock "d3e99100-f13f-4019-9b5a-adaa65dacc5f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 738.973368] env[63345]: DEBUG oslo_concurrency.lockutils [req-8e0723d9-f6b9-4acd-aacb-12c9f2e8781a req-251c6477-780f-41b2-8ae3-ea09ff22e765 service nova] Lock "d3e99100-f13f-4019-9b5a-adaa65dacc5f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 738.973540] env[63345]: DEBUG oslo_concurrency.lockutils [req-8e0723d9-f6b9-4acd-aacb-12c9f2e8781a req-251c6477-780f-41b2-8ae3-ea09ff22e765 service nova] Lock "d3e99100-f13f-4019-9b5a-adaa65dacc5f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 738.973704] env[63345]: DEBUG nova.compute.manager [req-8e0723d9-f6b9-4acd-aacb-12c9f2e8781a req-251c6477-780f-41b2-8ae3-ea09ff22e765 service nova] [instance: d3e99100-f13f-4019-9b5a-adaa65dacc5f] No waiting events found dispatching network-vif-plugged-87ece051-f9a8-483b-bc74-1e13e76bdd75 {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 738.973868] env[63345]: WARNING nova.compute.manager [req-8e0723d9-f6b9-4acd-aacb-12c9f2e8781a req-251c6477-780f-41b2-8ae3-ea09ff22e765 service nova] [instance: d3e99100-f13f-4019-9b5a-adaa65dacc5f] Received unexpected event network-vif-plugged-87ece051-f9a8-483b-bc74-1e13e76bdd75 for instance with vm_state building and task_state spawning. [ 739.054736] env[63345]: DEBUG oslo_vmware.api [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Task: {'id': task-1016927, 'name': Rename_Task, 'duration_secs': 0.143895} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.054736] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: 4a59b565-571f-48ef-97bd-bed9853e2d8e] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 739.054736] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c1f89244-b103-4c30-9afa-6c815773164c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.061600] env[63345]: DEBUG oslo_vmware.api [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Waiting for the task: (returnval){ [ 739.061600] env[63345]: value = "task-1016928" [ 739.061600] env[63345]: _type = "Task" [ 739.061600] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.069176] env[63345]: DEBUG nova.compute.manager [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 6cbe136b-5bf6-4f17-bcef-b712d850615f] Start spawning the instance on the hypervisor. {{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 739.078147] env[63345]: DEBUG nova.network.neutron [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] [instance: d3e99100-f13f-4019-9b5a-adaa65dacc5f] Successfully updated port: 87ece051-f9a8-483b-bc74-1e13e76bdd75 {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 739.085318] env[63345]: DEBUG oslo_vmware.api [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Task: {'id': task-1016928, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.108991] env[63345]: DEBUG nova.virt.hardware [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 739.109452] env[63345]: DEBUG nova.virt.hardware [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 739.109686] env[63345]: DEBUG nova.virt.hardware [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 739.109935] env[63345]: DEBUG nova.virt.hardware [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 739.110151] env[63345]: DEBUG nova.virt.hardware [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 739.110399] env[63345]: DEBUG nova.virt.hardware [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 739.110675] env[63345]: DEBUG nova.virt.hardware [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 739.110893] env[63345]: DEBUG nova.virt.hardware [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 739.111141] env[63345]: DEBUG nova.virt.hardware [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Got 1 
possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 739.111395] env[63345]: DEBUG nova.virt.hardware [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 739.111652] env[63345]: DEBUG nova.virt.hardware [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 739.113466] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9883613-bc64-4a78-a4e2-5b3de86259e2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.123080] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffa00be4-cc8b-4541-b072-fcb91fe3d0cb {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.203168] env[63345]: DEBUG oslo_concurrency.lockutils [None req-2672da66-43f2-4cdb-9d1e-327963356641 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.159s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 739.207455] env[63345]: DEBUG oslo_concurrency.lockutils [None req-2cac5929-5701-4cd3-b462-ff4ee05a1b55 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 20.260s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 739.234356] env[63345]: INFO nova.scheduler.client.report [None req-2672da66-43f2-4cdb-9d1e-327963356641 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Deleted allocations for instance 30755716-03a7-41bd-90c2-7ef21baf9975 [ 739.405985] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-9ff9e066-5206-4b32-aec7-946a708f647c tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 3101726f-5b14-417e-bcf8-390ce1f9b467] Creating linked-clone VM from snapshot {{(pid=63345) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 739.406392] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-7372ea94-44a8-487a-b568-97af163f2551 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.421253] env[63345]: DEBUG oslo_vmware.api [None req-9ff9e066-5206-4b32-aec7-946a708f647c tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Waiting for the task: (returnval){ [ 739.421253] env[63345]: value = "task-1016929" [ 739.421253] env[63345]: _type = "Task" [ 739.421253] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.428613] env[63345]: DEBUG oslo_vmware.api [None req-9ff9e066-5206-4b32-aec7-946a708f647c tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': task-1016929, 'name': CloneVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.432682] env[63345]: DEBUG oslo_concurrency.lockutils [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 739.575706] env[63345]: DEBUG oslo_vmware.api [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Task: {'id': task-1016928, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.585045] env[63345]: DEBUG oslo_concurrency.lockutils [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Acquiring lock "refresh_cache-d3e99100-f13f-4019-9b5a-adaa65dacc5f" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 739.585231] env[63345]: DEBUG oslo_concurrency.lockutils [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Acquired lock "refresh_cache-d3e99100-f13f-4019-9b5a-adaa65dacc5f" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 739.585429] env[63345]: DEBUG nova.network.neutron [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] [instance: d3e99100-f13f-4019-9b5a-adaa65dacc5f] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 739.710385] env[63345]: DEBUG nova.objects.instance [None req-2cac5929-5701-4cd3-b462-ff4ee05a1b55 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Lazy-loading 'migration_context' on Instance uuid 27c6dc17-4ded-4fe7-8fba-265eae64fc32 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 739.726510] env[63345]: DEBUG nova.network.neutron [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 6cbe136b-5bf6-4f17-bcef-b712d850615f] Successfully updated port: be445772-8a21-4213-b9d3-8852ba3c12ef {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 739.745398] env[63345]: DEBUG oslo_concurrency.lockutils [None req-2672da66-43f2-4cdb-9d1e-327963356641 tempest-ServerAddressesTestJSON-1215525429 tempest-ServerAddressesTestJSON-1215525429-project-member] Lock "30755716-03a7-41bd-90c2-7ef21baf9975" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.880s {{(pid=63345) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 739.805113] env[63345]: DEBUG nova.compute.manager [req-ff761bea-eafd-4cbd-af0e-7fdf8865e8c4 req-f3c26632-2fbc-438c-bcc7-2f9a034cc183 service nova] [instance: 6cbe136b-5bf6-4f17-bcef-b712d850615f] Received event network-vif-plugged-be445772-8a21-4213-b9d3-8852ba3c12ef {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 739.805113] env[63345]: DEBUG oslo_concurrency.lockutils [req-ff761bea-eafd-4cbd-af0e-7fdf8865e8c4 req-f3c26632-2fbc-438c-bcc7-2f9a034cc183 service nova] Acquiring lock "6cbe136b-5bf6-4f17-bcef-b712d850615f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 739.805623] env[63345]: DEBUG oslo_concurrency.lockutils [req-ff761bea-eafd-4cbd-af0e-7fdf8865e8c4 req-f3c26632-2fbc-438c-bcc7-2f9a034cc183 service nova] Lock "6cbe136b-5bf6-4f17-bcef-b712d850615f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 739.805623] env[63345]: DEBUG oslo_concurrency.lockutils [req-ff761bea-eafd-4cbd-af0e-7fdf8865e8c4 req-f3c26632-2fbc-438c-bcc7-2f9a034cc183 service nova] Lock "6cbe136b-5bf6-4f17-bcef-b712d850615f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 739.805703] env[63345]: DEBUG nova.compute.manager [req-ff761bea-eafd-4cbd-af0e-7fdf8865e8c4 req-f3c26632-2fbc-438c-bcc7-2f9a034cc183 service nova] [instance: 6cbe136b-5bf6-4f17-bcef-b712d850615f] No waiting events found dispatching network-vif-plugged-be445772-8a21-4213-b9d3-8852ba3c12ef {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 739.805863] env[63345]: WARNING nova.compute.manager [req-ff761bea-eafd-4cbd-af0e-7fdf8865e8c4 req-f3c26632-2fbc-438c-bcc7-2f9a034cc183 service nova] [instance: 6cbe136b-5bf6-4f17-bcef-b712d850615f] Received unexpected event network-vif-plugged-be445772-8a21-4213-b9d3-8852ba3c12ef for instance with vm_state building and task_state spawning. [ 739.932626] env[63345]: DEBUG oslo_vmware.api [None req-9ff9e066-5206-4b32-aec7-946a708f647c tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': task-1016929, 'name': CloneVM_Task} progress is 94%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.074042] env[63345]: DEBUG oslo_vmware.api [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Task: {'id': task-1016928, 'name': PowerOnVM_Task} progress is 90%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.121103] env[63345]: DEBUG nova.network.neutron [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] [instance: d3e99100-f13f-4019-9b5a-adaa65dacc5f] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 740.234988] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Acquiring lock "refresh_cache-6cbe136b-5bf6-4f17-bcef-b712d850615f" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 740.234988] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Acquired lock "refresh_cache-6cbe136b-5bf6-4f17-bcef-b712d850615f" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 740.234988] env[63345]: DEBUG nova.network.neutron [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 6cbe136b-5bf6-4f17-bcef-b712d850615f] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 740.289720] env[63345]: DEBUG nova.network.neutron [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] [instance: d3e99100-f13f-4019-9b5a-adaa65dacc5f] Updating instance_info_cache with network_info: [{"id": "87ece051-f9a8-483b-bc74-1e13e76bdd75", "address": "fa:16:3e:58:44:e9", "network": {"id": "52ba8cc4-5073-473c-b5ad-a03898b2857c", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1954790933-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72dcecd3bd674fa2bab4034f63305497", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4b033f4d-2e92-4702-add6-410a29d3f251", "external-id": "nsx-vlan-transportzone-649", "segmentation_id": 649, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap87ece051-f9", "ovs_interfaceid": "87ece051-f9a8-483b-bc74-1e13e76bdd75", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 740.376920] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Acquiring lock "cb712d80-be78-4c19-a891-329011521f30" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 740.377215] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Lock "cb712d80-be78-4c19-a891-329011521f30" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 740.433569] env[63345]: DEBUG oslo_vmware.api [None req-9ff9e066-5206-4b32-aec7-946a708f647c tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': task-1016929, 'name': CloneVM_Task} progress is 95%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.578445] env[63345]: DEBUG oslo_vmware.api [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Task: {'id': task-1016928, 'name': PowerOnVM_Task, 'duration_secs': 1.447416} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.578726] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: 4a59b565-571f-48ef-97bd-bed9853e2d8e] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 740.578924] env[63345]: INFO nova.compute.manager [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: 4a59b565-571f-48ef-97bd-bed9853e2d8e] Took 8.76 seconds to spawn the instance on the hypervisor. [ 740.579632] env[63345]: DEBUG nova.compute.manager [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: 4a59b565-571f-48ef-97bd-bed9853e2d8e] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 740.580388] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f80de3d6-9b51-4e55-ab21-71bb0f12730e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.727588] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-022cb026-fe67-4b23-9573-4ae0fe9dc595 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.737839] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6206da5-4971-4007-b690-66fd7ac45c8d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.775355] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d210d0e-ce93-4b02-85b5-e7494b00111a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.780796] env[63345]: DEBUG nova.network.neutron [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 6cbe136b-5bf6-4f17-bcef-b712d850615f] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 740.789027] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48b55ef8-2cb4-4d59-b3ec-6ae6c432b51c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.797112] env[63345]: DEBUG oslo_concurrency.lockutils [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Releasing lock "refresh_cache-d3e99100-f13f-4019-9b5a-adaa65dacc5f" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 740.797112] env[63345]: DEBUG nova.compute.manager [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] [instance: d3e99100-f13f-4019-9b5a-adaa65dacc5f] Instance network_info: |[{"id": "87ece051-f9a8-483b-bc74-1e13e76bdd75", "address": "fa:16:3e:58:44:e9", "network": {"id": "52ba8cc4-5073-473c-b5ad-a03898b2857c", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1954790933-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72dcecd3bd674fa2bab4034f63305497", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4b033f4d-2e92-4702-add6-410a29d3f251", "external-id": "nsx-vlan-transportzone-649", "segmentation_id": 649, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap87ece051-f9", "ovs_interfaceid": "87ece051-f9a8-483b-bc74-1e13e76bdd75", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 740.797506] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] [instance: d3e99100-f13f-4019-9b5a-adaa65dacc5f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:58:44:e9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4b033f4d-2e92-4702-add6-410a29d3f251', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '87ece051-f9a8-483b-bc74-1e13e76bdd75', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 740.802532] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Creating folder: Project (72dcecd3bd674fa2bab4034f63305497). Parent ref: group-v225918. 
{{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 740.805954] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5e4fe2ec-4c0c-4eab-993c-c26540acf8ca {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.821027] env[63345]: DEBUG nova.compute.provider_tree [None req-2cac5929-5701-4cd3-b462-ff4ee05a1b55 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 740.828558] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Created folder: Project (72dcecd3bd674fa2bab4034f63305497) in parent group-v225918. [ 740.828879] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Creating folder: Instances. Parent ref: group-v226006. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 740.829148] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-643fbba5-e185-435d-8998-80863d409c5c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.838802] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Created folder: Instances in parent group-v226006. [ 740.839097] env[63345]: DEBUG oslo.service.loopingcall [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 740.841680] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d3e99100-f13f-4019-9b5a-adaa65dacc5f] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 740.842060] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-520f66f2-a120-44fd-af9e-38d300b07dc2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.862834] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 740.862834] env[63345]: value = "task-1016932" [ 740.862834] env[63345]: _type = "Task" [ 740.862834] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.870740] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016932, 'name': CreateVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.931846] env[63345]: DEBUG oslo_vmware.api [None req-9ff9e066-5206-4b32-aec7-946a708f647c tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': task-1016929, 'name': CloneVM_Task, 'duration_secs': 1.449053} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.932125] env[63345]: INFO nova.virt.vmwareapi.vmops [None req-9ff9e066-5206-4b32-aec7-946a708f647c tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 3101726f-5b14-417e-bcf8-390ce1f9b467] Created linked-clone VM from snapshot [ 740.932856] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ce44ecd-a0cc-4098-a760-724ea3e42f8d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.940112] env[63345]: DEBUG nova.virt.vmwareapi.images [None req-9ff9e066-5206-4b32-aec7-946a708f647c tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 3101726f-5b14-417e-bcf8-390ce1f9b467] Uploading image be13e0bd-c01a-4f6f-8181-45db0c091d1c {{(pid=63345) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 740.952820] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ff9e066-5206-4b32-aec7-946a708f647c tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 3101726f-5b14-417e-bcf8-390ce1f9b467] Destroying the VM {{(pid=63345) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 740.953121] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-9ad08a5c-7cf0-4436-b50a-62f19c1d4d06 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.957289] env[63345]: DEBUG nova.network.neutron [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 6cbe136b-5bf6-4f17-bcef-b712d850615f] Updating instance_info_cache with network_info: [{"id": "be445772-8a21-4213-b9d3-8852ba3c12ef", "address": "fa:16:3e:b4:1d:16", "network": {"id": "6adcb593-15d5-4959-9e09-f7794e033f9e", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1117018512-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dfc1248fb5ee4f798b6c59154d4cf623", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "450939f7-f74b-41f7-93f7-b4fde6a6fbed", "external-id": "nsx-vlan-transportzone-866", "segmentation_id": 866, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe445772-8a", "ovs_interfaceid": "be445772-8a21-4213-b9d3-8852ba3c12ef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 740.959777] env[63345]: DEBUG oslo_vmware.api [None req-9ff9e066-5206-4b32-aec7-946a708f647c tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Waiting for the task: (returnval){ [ 740.959777] 
env[63345]: value = "task-1016933" [ 740.959777] env[63345]: _type = "Task" [ 740.959777] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.969638] env[63345]: DEBUG oslo_vmware.api [None req-9ff9e066-5206-4b32-aec7-946a708f647c tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': task-1016933, 'name': Destroy_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.102940] env[63345]: INFO nova.compute.manager [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: 4a59b565-571f-48ef-97bd-bed9853e2d8e] Took 35.17 seconds to build instance. [ 741.227378] env[63345]: DEBUG nova.compute.manager [req-68128d0f-2568-488d-be76-981962bbeba4 req-0a3c5479-9f37-4f1a-a47b-9e10ddcc852b service nova] [instance: d3e99100-f13f-4019-9b5a-adaa65dacc5f] Received event network-changed-87ece051-f9a8-483b-bc74-1e13e76bdd75 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 741.228034] env[63345]: DEBUG nova.compute.manager [req-68128d0f-2568-488d-be76-981962bbeba4 req-0a3c5479-9f37-4f1a-a47b-9e10ddcc852b service nova] [instance: d3e99100-f13f-4019-9b5a-adaa65dacc5f] Refreshing instance network info cache due to event network-changed-87ece051-f9a8-483b-bc74-1e13e76bdd75. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 741.228627] env[63345]: DEBUG oslo_concurrency.lockutils [req-68128d0f-2568-488d-be76-981962bbeba4 req-0a3c5479-9f37-4f1a-a47b-9e10ddcc852b service nova] Acquiring lock "refresh_cache-d3e99100-f13f-4019-9b5a-adaa65dacc5f" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 741.228984] env[63345]: DEBUG oslo_concurrency.lockutils [req-68128d0f-2568-488d-be76-981962bbeba4 req-0a3c5479-9f37-4f1a-a47b-9e10ddcc852b service nova] Acquired lock "refresh_cache-d3e99100-f13f-4019-9b5a-adaa65dacc5f" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 741.229332] env[63345]: DEBUG nova.network.neutron [req-68128d0f-2568-488d-be76-981962bbeba4 req-0a3c5479-9f37-4f1a-a47b-9e10ddcc852b service nova] [instance: d3e99100-f13f-4019-9b5a-adaa65dacc5f] Refreshing network info cache for port 87ece051-f9a8-483b-bc74-1e13e76bdd75 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 741.284408] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a10b6693-c455-4fba-9874-34dcf1ff07be tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Acquiring lock "64fcf837-1d9d-41b1-a2a1-3c16362932cf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 741.284702] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a10b6693-c455-4fba-9874-34dcf1ff07be tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Lock "64fcf837-1d9d-41b1-a2a1-3c16362932cf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} 
[ 741.284927] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a10b6693-c455-4fba-9874-34dcf1ff07be tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Acquiring lock "64fcf837-1d9d-41b1-a2a1-3c16362932cf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 741.285129] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a10b6693-c455-4fba-9874-34dcf1ff07be tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Lock "64fcf837-1d9d-41b1-a2a1-3c16362932cf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 741.285321] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a10b6693-c455-4fba-9874-34dcf1ff07be tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Lock "64fcf837-1d9d-41b1-a2a1-3c16362932cf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 741.290684] env[63345]: INFO nova.compute.manager [None req-a10b6693-c455-4fba-9874-34dcf1ff07be tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] [instance: 64fcf837-1d9d-41b1-a2a1-3c16362932cf] Terminating instance [ 741.323175] env[63345]: DEBUG nova.scheduler.client.report [None req-2cac5929-5701-4cd3-b462-ff4ee05a1b55 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 741.374023] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016932, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.460879] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Releasing lock "refresh_cache-6cbe136b-5bf6-4f17-bcef-b712d850615f" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 741.461277] env[63345]: DEBUG nova.compute.manager [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 6cbe136b-5bf6-4f17-bcef-b712d850615f] Instance network_info: |[{"id": "be445772-8a21-4213-b9d3-8852ba3c12ef", "address": "fa:16:3e:b4:1d:16", "network": {"id": "6adcb593-15d5-4959-9e09-f7794e033f9e", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1117018512-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dfc1248fb5ee4f798b6c59154d4cf623", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "450939f7-f74b-41f7-93f7-b4fde6a6fbed", "external-id": "nsx-vlan-transportzone-866", "segmentation_id": 866, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe445772-8a", "ovs_interfaceid": "be445772-8a21-4213-b9d3-8852ba3c12ef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 741.461763] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 6cbe136b-5bf6-4f17-bcef-b712d850615f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b4:1d:16', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '450939f7-f74b-41f7-93f7-b4fde6a6fbed', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'be445772-8a21-4213-b9d3-8852ba3c12ef', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 741.469805] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Creating folder: Project (dfc1248fb5ee4f798b6c59154d4cf623). Parent ref: group-v225918. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 741.473123] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-44671f76-0519-4d4b-b37c-82df7c03690d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.480823] env[63345]: DEBUG oslo_vmware.api [None req-9ff9e066-5206-4b32-aec7-946a708f647c tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': task-1016933, 'name': Destroy_Task} progress is 33%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.483464] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Created folder: Project (dfc1248fb5ee4f798b6c59154d4cf623) in parent group-v225918. [ 741.483642] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Creating folder: Instances. Parent ref: group-v226009. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 741.483873] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-973ae73f-04db-4d4b-a117-740c337e185d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.492827] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Created folder: Instances in parent group-v226009. [ 741.493072] env[63345]: DEBUG oslo.service.loopingcall [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 741.493261] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6cbe136b-5bf6-4f17-bcef-b712d850615f] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 741.493461] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c84de8ba-cc0b-495a-b0c3-e99e0aaf8885 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.514438] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 741.514438] env[63345]: value = "task-1016936" [ 741.514438] env[63345]: _type = "Task" [ 741.514438] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.521937] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016936, 'name': CreateVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.605375] env[63345]: DEBUG oslo_concurrency.lockutils [None req-af2c3f23-1abe-40b6-9d5f-292c50928eed tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Lock "4a59b565-571f-48ef-97bd-bed9853e2d8e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 110.546s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 741.799642] env[63345]: DEBUG nova.compute.manager [None req-a10b6693-c455-4fba-9874-34dcf1ff07be tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] [instance: 64fcf837-1d9d-41b1-a2a1-3c16362932cf] Start destroying the instance on the hypervisor. 
{{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 741.799879] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a10b6693-c455-4fba-9874-34dcf1ff07be tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] [instance: 64fcf837-1d9d-41b1-a2a1-3c16362932cf] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 741.800791] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de299097-6c76-49c1-ba3a-edd44fe36f0f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.810749] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-a10b6693-c455-4fba-9874-34dcf1ff07be tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] [instance: 64fcf837-1d9d-41b1-a2a1-3c16362932cf] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 741.811024] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-de06736d-37e2-4e15-a924-63ca197f4109 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.817797] env[63345]: DEBUG oslo_vmware.api [None req-a10b6693-c455-4fba-9874-34dcf1ff07be tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Waiting for the task: (returnval){ [ 741.817797] env[63345]: value = "task-1016937" [ 741.817797] env[63345]: _type = "Task" [ 741.817797] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.826677] env[63345]: DEBUG oslo_vmware.api [None req-a10b6693-c455-4fba-9874-34dcf1ff07be tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Task: {'id': task-1016937, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.852923] env[63345]: DEBUG nova.compute.manager [req-f45c907f-cbd7-4b46-aaf4-38c6ec91c166 req-c585b36b-2414-49e4-baa1-d188daa70034 service nova] [instance: 6cbe136b-5bf6-4f17-bcef-b712d850615f] Received event network-changed-be445772-8a21-4213-b9d3-8852ba3c12ef {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 741.852923] env[63345]: DEBUG nova.compute.manager [req-f45c907f-cbd7-4b46-aaf4-38c6ec91c166 req-c585b36b-2414-49e4-baa1-d188daa70034 service nova] [instance: 6cbe136b-5bf6-4f17-bcef-b712d850615f] Refreshing instance network info cache due to event network-changed-be445772-8a21-4213-b9d3-8852ba3c12ef. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 741.852923] env[63345]: DEBUG oslo_concurrency.lockutils [req-f45c907f-cbd7-4b46-aaf4-38c6ec91c166 req-c585b36b-2414-49e4-baa1-d188daa70034 service nova] Acquiring lock "refresh_cache-6cbe136b-5bf6-4f17-bcef-b712d850615f" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 741.852923] env[63345]: DEBUG oslo_concurrency.lockutils [req-f45c907f-cbd7-4b46-aaf4-38c6ec91c166 req-c585b36b-2414-49e4-baa1-d188daa70034 service nova] Acquired lock "refresh_cache-6cbe136b-5bf6-4f17-bcef-b712d850615f" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 741.852923] env[63345]: DEBUG nova.network.neutron [req-f45c907f-cbd7-4b46-aaf4-38c6ec91c166 req-c585b36b-2414-49e4-baa1-d188daa70034 service nova] [instance: 6cbe136b-5bf6-4f17-bcef-b712d850615f] Refreshing network info cache for port be445772-8a21-4213-b9d3-8852ba3c12ef {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 741.875624] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016932, 'name': CreateVM_Task, 'duration_secs': 0.582301} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.876208] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d3e99100-f13f-4019-9b5a-adaa65dacc5f] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 741.876702] env[63345]: DEBUG oslo_concurrency.lockutils [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 741.876915] env[63345]: DEBUG oslo_concurrency.lockutils [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 741.877249] env[63345]: DEBUG oslo_concurrency.lockutils [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 741.878756] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa40d607-7d3d-4f90-9e28-9c8fb6d8f38d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.884914] env[63345]: DEBUG oslo_vmware.api [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Waiting for the task: (returnval){ [ 741.884914] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]526f4448-7897-c874-20bc-2dc1f28db04b" [ 741.884914] env[63345]: _type = "Task" [ 741.884914] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.897300] env[63345]: DEBUG oslo_vmware.api [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]526f4448-7897-c874-20bc-2dc1f28db04b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.974730] env[63345]: DEBUG oslo_vmware.api [None req-9ff9e066-5206-4b32-aec7-946a708f647c tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': task-1016933, 'name': Destroy_Task, 'duration_secs': 0.673405} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.975126] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-9ff9e066-5206-4b32-aec7-946a708f647c tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 3101726f-5b14-417e-bcf8-390ce1f9b467] Destroyed the VM [ 741.975508] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-9ff9e066-5206-4b32-aec7-946a708f647c tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 3101726f-5b14-417e-bcf8-390ce1f9b467] Deleting Snapshot of the VM instance {{(pid=63345) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 741.975779] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-20629a67-10c2-4d35-a3d1-9eb99895e528 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.983497] env[63345]: DEBUG oslo_vmware.api [None req-9ff9e066-5206-4b32-aec7-946a708f647c tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Waiting for the task: (returnval){ [ 741.983497] env[63345]: value = "task-1016938" [ 741.983497] env[63345]: _type = "Task" [ 741.983497] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.994843] env[63345]: DEBUG oslo_vmware.api [None req-9ff9e066-5206-4b32-aec7-946a708f647c tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': task-1016938, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.027247] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016936, 'name': CreateVM_Task} progress is 99%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.070733] env[63345]: DEBUG nova.network.neutron [req-68128d0f-2568-488d-be76-981962bbeba4 req-0a3c5479-9f37-4f1a-a47b-9e10ddcc852b service nova] [instance: d3e99100-f13f-4019-9b5a-adaa65dacc5f] Updated VIF entry in instance network info cache for port 87ece051-f9a8-483b-bc74-1e13e76bdd75. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 742.070733] env[63345]: DEBUG nova.network.neutron [req-68128d0f-2568-488d-be76-981962bbeba4 req-0a3c5479-9f37-4f1a-a47b-9e10ddcc852b service nova] [instance: d3e99100-f13f-4019-9b5a-adaa65dacc5f] Updating instance_info_cache with network_info: [{"id": "87ece051-f9a8-483b-bc74-1e13e76bdd75", "address": "fa:16:3e:58:44:e9", "network": {"id": "52ba8cc4-5073-473c-b5ad-a03898b2857c", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1954790933-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72dcecd3bd674fa2bab4034f63305497", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4b033f4d-2e92-4702-add6-410a29d3f251", "external-id": "nsx-vlan-transportzone-649", "segmentation_id": 649, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap87ece051-f9", "ovs_interfaceid": "87ece051-f9a8-483b-bc74-1e13e76bdd75", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 742.108502] env[63345]: DEBUG nova.compute.manager [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] [instance: 02eb493e-d1a1-4461-8e3f-e493e96fe058] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 742.333886] env[63345]: DEBUG oslo_vmware.api [None req-a10b6693-c455-4fba-9874-34dcf1ff07be tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Task: {'id': task-1016937, 'name': PowerOffVM_Task, 'duration_secs': 0.258582} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.334439] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-a10b6693-c455-4fba-9874-34dcf1ff07be tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] [instance: 64fcf837-1d9d-41b1-a2a1-3c16362932cf] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 742.334439] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a10b6693-c455-4fba-9874-34dcf1ff07be tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] [instance: 64fcf837-1d9d-41b1-a2a1-3c16362932cf] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 742.334702] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c4b357de-f000-46c4-a673-431801f2b99f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.340802] env[63345]: DEBUG oslo_concurrency.lockutils [None req-2cac5929-5701-4cd3-b462-ff4ee05a1b55 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 3.134s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 742.349220] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.443s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 742.350883] env[63345]: INFO nova.compute.claims [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 778faa4f-4c5f-4ec2-b17b-5d7513c9c218] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 742.402170] env[63345]: DEBUG oslo_vmware.api [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]526f4448-7897-c874-20bc-2dc1f28db04b, 'name': SearchDatastore_Task, 'duration_secs': 0.011854} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.402958] env[63345]: DEBUG oslo_concurrency.lockutils [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 742.403190] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] [instance: d3e99100-f13f-4019-9b5a-adaa65dacc5f] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 742.403559] env[63345]: DEBUG oslo_concurrency.lockutils [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 742.403853] env[63345]: DEBUG oslo_concurrency.lockutils [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 742.404147] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 742.404485] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7cb0cdaa-1e7d-4590-b255-3bc69408f714 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.416979] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 742.417294] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 742.418257] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-95dddbc3-4243-4292-abae-df212e232ff0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.427572] env[63345]: DEBUG oslo_vmware.api [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Waiting for the task: (returnval){ [ 742.427572] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]5243bfa9-7122-2173-3cf8-5f45169b90d0" [ 742.427572] env[63345]: _type = "Task" [ 742.427572] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.441046] env[63345]: DEBUG oslo_vmware.api [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5243bfa9-7122-2173-3cf8-5f45169b90d0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.444347] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a10b6693-c455-4fba-9874-34dcf1ff07be tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] [instance: 64fcf837-1d9d-41b1-a2a1-3c16362932cf] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 742.444721] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a10b6693-c455-4fba-9874-34dcf1ff07be tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] [instance: 64fcf837-1d9d-41b1-a2a1-3c16362932cf] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 742.445279] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-a10b6693-c455-4fba-9874-34dcf1ff07be tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Deleting the datastore file [datastore2] 64fcf837-1d9d-41b1-a2a1-3c16362932cf {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 742.445646] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ced79656-71fc-4406-ac1e-774415faf897 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.452307] env[63345]: DEBUG oslo_vmware.api [None req-a10b6693-c455-4fba-9874-34dcf1ff07be tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Waiting for the task: (returnval){ [ 742.452307] env[63345]: value = "task-1016940" [ 742.452307] env[63345]: _type = "Task" [ 742.452307] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.461419] env[63345]: DEBUG oslo_vmware.api [None req-a10b6693-c455-4fba-9874-34dcf1ff07be tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Task: {'id': task-1016940, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.502816] env[63345]: DEBUG oslo_vmware.api [None req-9ff9e066-5206-4b32-aec7-946a708f647c tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': task-1016938, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.527071] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016936, 'name': CreateVM_Task, 'duration_secs': 0.540327} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.527292] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6cbe136b-5bf6-4f17-bcef-b712d850615f] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 742.528032] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 742.528222] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 742.529206] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 742.529206] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ead64545-bc0e-4b4c-b26b-287047d84ea5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.534919] env[63345]: DEBUG oslo_vmware.api [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Waiting for the task: (returnval){ [ 742.534919] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52559c79-f541-d422-6ef6-f66b8f3fd9a0" [ 742.534919] env[63345]: _type = "Task" [ 742.534919] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.544265] env[63345]: DEBUG oslo_vmware.api [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52559c79-f541-d422-6ef6-f66b8f3fd9a0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.572643] env[63345]: DEBUG oslo_concurrency.lockutils [req-68128d0f-2568-488d-be76-981962bbeba4 req-0a3c5479-9f37-4f1a-a47b-9e10ddcc852b service nova] Releasing lock "refresh_cache-d3e99100-f13f-4019-9b5a-adaa65dacc5f" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 742.573074] env[63345]: DEBUG nova.compute.manager [req-68128d0f-2568-488d-be76-981962bbeba4 req-0a3c5479-9f37-4f1a-a47b-9e10ddcc852b service nova] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Received event network-changed-9bf872ef-9bac-41a4-b3eb-319d2572fee6 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 742.573261] env[63345]: DEBUG nova.compute.manager [req-68128d0f-2568-488d-be76-981962bbeba4 req-0a3c5479-9f37-4f1a-a47b-9e10ddcc852b service nova] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Refreshing instance network info cache due to event network-changed-9bf872ef-9bac-41a4-b3eb-319d2572fee6. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 742.573481] env[63345]: DEBUG oslo_concurrency.lockutils [req-68128d0f-2568-488d-be76-981962bbeba4 req-0a3c5479-9f37-4f1a-a47b-9e10ddcc852b service nova] Acquiring lock "refresh_cache-3b0d115d-dad5-4881-a0e0-b98f555da533" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 742.573691] env[63345]: DEBUG oslo_concurrency.lockutils [req-68128d0f-2568-488d-be76-981962bbeba4 req-0a3c5479-9f37-4f1a-a47b-9e10ddcc852b service nova] Acquired lock "refresh_cache-3b0d115d-dad5-4881-a0e0-b98f555da533" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 742.573862] env[63345]: DEBUG nova.network.neutron [req-68128d0f-2568-488d-be76-981962bbeba4 req-0a3c5479-9f37-4f1a-a47b-9e10ddcc852b service nova] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Refreshing network info cache for port 9bf872ef-9bac-41a4-b3eb-319d2572fee6 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 742.636073] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 742.683714] env[63345]: DEBUG nova.network.neutron [req-f45c907f-cbd7-4b46-aaf4-38c6ec91c166 req-c585b36b-2414-49e4-baa1-d188daa70034 service nova] [instance: 6cbe136b-5bf6-4f17-bcef-b712d850615f] Updated VIF entry in instance network info cache for port be445772-8a21-4213-b9d3-8852ba3c12ef. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 742.684201] env[63345]: DEBUG nova.network.neutron [req-f45c907f-cbd7-4b46-aaf4-38c6ec91c166 req-c585b36b-2414-49e4-baa1-d188daa70034 service nova] [instance: 6cbe136b-5bf6-4f17-bcef-b712d850615f] Updating instance_info_cache with network_info: [{"id": "be445772-8a21-4213-b9d3-8852ba3c12ef", "address": "fa:16:3e:b4:1d:16", "network": {"id": "6adcb593-15d5-4959-9e09-f7794e033f9e", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1117018512-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dfc1248fb5ee4f798b6c59154d4cf623", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "450939f7-f74b-41f7-93f7-b4fde6a6fbed", "external-id": "nsx-vlan-transportzone-866", "segmentation_id": 866, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe445772-8a", "ovs_interfaceid": "be445772-8a21-4213-b9d3-8852ba3c12ef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 742.939738] env[63345]: DEBUG oslo_vmware.api [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5243bfa9-7122-2173-3cf8-5f45169b90d0, 'name': SearchDatastore_Task, 'duration_secs': 0.014419} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.940609] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fbed9bf1-b20b-4ee4-95da-5d2123f65f1d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.946183] env[63345]: DEBUG oslo_vmware.api [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Waiting for the task: (returnval){ [ 742.946183] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52bc3c79-e417-a08f-418d-8dbd377c0e7c" [ 742.946183] env[63345]: _type = "Task" [ 742.946183] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.955763] env[63345]: DEBUG oslo_vmware.api [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52bc3c79-e417-a08f-418d-8dbd377c0e7c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.963627] env[63345]: DEBUG oslo_vmware.api [None req-a10b6693-c455-4fba-9874-34dcf1ff07be tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Task: {'id': task-1016940, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.170757} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.963874] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-a10b6693-c455-4fba-9874-34dcf1ff07be tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 742.964072] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a10b6693-c455-4fba-9874-34dcf1ff07be tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] [instance: 64fcf837-1d9d-41b1-a2a1-3c16362932cf] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 742.964252] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a10b6693-c455-4fba-9874-34dcf1ff07be tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] [instance: 64fcf837-1d9d-41b1-a2a1-3c16362932cf] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 742.964428] env[63345]: INFO nova.compute.manager [None req-a10b6693-c455-4fba-9874-34dcf1ff07be tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] [instance: 64fcf837-1d9d-41b1-a2a1-3c16362932cf] Took 1.16 seconds to destroy the instance on the hypervisor. [ 742.964673] env[63345]: DEBUG oslo.service.loopingcall [None req-a10b6693-c455-4fba-9874-34dcf1ff07be tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 742.964909] env[63345]: DEBUG nova.compute.manager [-] [instance: 64fcf837-1d9d-41b1-a2a1-3c16362932cf] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 742.965015] env[63345]: DEBUG nova.network.neutron [-] [instance: 64fcf837-1d9d-41b1-a2a1-3c16362932cf] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 742.995993] env[63345]: DEBUG oslo_vmware.api [None req-9ff9e066-5206-4b32-aec7-946a708f647c tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': task-1016938, 'name': RemoveSnapshot_Task, 'duration_secs': 0.819384} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.995993] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-9ff9e066-5206-4b32-aec7-946a708f647c tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 3101726f-5b14-417e-bcf8-390ce1f9b467] Deleted Snapshot of the VM instance {{(pid=63345) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 743.048019] env[63345]: DEBUG oslo_vmware.api [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52559c79-f541-d422-6ef6-f66b8f3fd9a0, 'name': SearchDatastore_Task, 'duration_secs': 0.011773} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.048019] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 743.048019] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 6cbe136b-5bf6-4f17-bcef-b712d850615f] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 743.048019] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 743.048220] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 743.048220] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 743.048220] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ab081a07-85cd-4d33-9596-d033380478eb {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.057547] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 743.057736] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None 
req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 743.058626] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1be17dbb-0a73-4179-8aa3-b258d26ee65e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.066683] env[63345]: DEBUG oslo_vmware.api [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Waiting for the task: (returnval){ [ 743.066683] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52c019a5-0e79-4f26-1004-519ce21ad3f4" [ 743.066683] env[63345]: _type = "Task" [ 743.066683] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.074493] env[63345]: DEBUG oslo_vmware.api [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52c019a5-0e79-4f26-1004-519ce21ad3f4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.186977] env[63345]: DEBUG oslo_concurrency.lockutils [req-f45c907f-cbd7-4b46-aaf4-38c6ec91c166 req-c585b36b-2414-49e4-baa1-d188daa70034 service nova] Releasing lock "refresh_cache-6cbe136b-5bf6-4f17-bcef-b712d850615f" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 743.461953] env[63345]: DEBUG oslo_vmware.api [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52bc3c79-e417-a08f-418d-8dbd377c0e7c, 'name': SearchDatastore_Task, 'duration_secs': 0.028386} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.462251] env[63345]: DEBUG oslo_concurrency.lockutils [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 743.462491] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore1] d3e99100-f13f-4019-9b5a-adaa65dacc5f/d3e99100-f13f-4019-9b5a-adaa65dacc5f.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 743.462748] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-25c81abd-f029-4a98-bc6a-897b7adab0f8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.469438] env[63345]: DEBUG oslo_vmware.api [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Waiting for the task: (returnval){ [ 743.469438] env[63345]: value = "task-1016941" [ 743.469438] env[63345]: _type = "Task" [ 743.469438] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.478152] env[63345]: DEBUG oslo_vmware.api [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Task: {'id': task-1016941, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.504020] env[63345]: WARNING nova.compute.manager [None req-9ff9e066-5206-4b32-aec7-946a708f647c tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 3101726f-5b14-417e-bcf8-390ce1f9b467] Image not found during snapshot: nova.exception.ImageNotFound: Image be13e0bd-c01a-4f6f-8181-45db0c091d1c could not be found. [ 743.507578] env[63345]: DEBUG nova.compute.manager [req-e4a3b011-50eb-4f43-a5ca-1a6758de16c2 req-10425a67-47a2-408c-ac52-c13fbb59587a service nova] [instance: 4a59b565-571f-48ef-97bd-bed9853e2d8e] Received event network-changed-4ea770ff-4619-4df2-b09f-53b1fdc250e5 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 743.507578] env[63345]: DEBUG nova.compute.manager [req-e4a3b011-50eb-4f43-a5ca-1a6758de16c2 req-10425a67-47a2-408c-ac52-c13fbb59587a service nova] [instance: 4a59b565-571f-48ef-97bd-bed9853e2d8e] Refreshing instance network info cache due to event network-changed-4ea770ff-4619-4df2-b09f-53b1fdc250e5. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 743.507578] env[63345]: DEBUG oslo_concurrency.lockutils [req-e4a3b011-50eb-4f43-a5ca-1a6758de16c2 req-10425a67-47a2-408c-ac52-c13fbb59587a service nova] Acquiring lock "refresh_cache-4a59b565-571f-48ef-97bd-bed9853e2d8e" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 743.507578] env[63345]: DEBUG oslo_concurrency.lockutils [req-e4a3b011-50eb-4f43-a5ca-1a6758de16c2 req-10425a67-47a2-408c-ac52-c13fbb59587a service nova] Acquired lock "refresh_cache-4a59b565-571f-48ef-97bd-bed9853e2d8e" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 743.507578] env[63345]: DEBUG nova.network.neutron [req-e4a3b011-50eb-4f43-a5ca-1a6758de16c2 req-10425a67-47a2-408c-ac52-c13fbb59587a service nova] [instance: 4a59b565-571f-48ef-97bd-bed9853e2d8e] Refreshing network info cache for port 4ea770ff-4619-4df2-b09f-53b1fdc250e5 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 743.520448] env[63345]: DEBUG nova.network.neutron [req-68128d0f-2568-488d-be76-981962bbeba4 req-0a3c5479-9f37-4f1a-a47b-9e10ddcc852b service nova] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Updated VIF entry in instance network info cache for port 9bf872ef-9bac-41a4-b3eb-319d2572fee6. {{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 743.520814] env[63345]: DEBUG nova.network.neutron [req-68128d0f-2568-488d-be76-981962bbeba4 req-0a3c5479-9f37-4f1a-a47b-9e10ddcc852b service nova] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Updating instance_info_cache with network_info: [{"id": "9bf872ef-9bac-41a4-b3eb-319d2572fee6", "address": "fa:16:3e:e6:33:e6", "network": {"id": "a44daa6a-6666-4277-911d-306e7f499492", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1563048768-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.240", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e6bcd39225b4bc5b1ac79111b46dd9e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b399c74-1411-408a-b4cd-84e268ae83fe", "external-id": "nsx-vlan-transportzone-486", "segmentation_id": 486, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9bf872ef-9b", "ovs_interfaceid": "9bf872ef-9bac-41a4-b3eb-319d2572fee6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 743.578232] env[63345]: DEBUG oslo_vmware.api [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52c019a5-0e79-4f26-1004-519ce21ad3f4, 'name': SearchDatastore_Task, 'duration_secs': 0.051095} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.579201] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-05221217-c60c-4f65-a52e-8a8182000aea {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.592172] env[63345]: DEBUG oslo_vmware.api [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Waiting for the task: (returnval){ [ 743.592172] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52d87e26-898f-5082-024f-2443236174c8" [ 743.592172] env[63345]: _type = "Task" [ 743.592172] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.601213] env[63345]: DEBUG oslo_vmware.api [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52d87e26-898f-5082-024f-2443236174c8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.804195] env[63345]: DEBUG nova.network.neutron [-] [instance: 64fcf837-1d9d-41b1-a2a1-3c16362932cf] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 743.827892] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b57a8214-caec-4d5c-beef-037c5f2e374a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.835781] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21ebfe42-6dfe-46e2-9f2f-76b19efed9d4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.869204] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fa0e979-6420-4efe-9eeb-58294873ae18 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.878035] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34bf5522-b762-497a-9d25-e0a4a64aae27 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.894751] env[63345]: DEBUG nova.compute.provider_tree [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 743.896673] env[63345]: INFO nova.compute.manager [None req-2cac5929-5701-4cd3-b462-ff4ee05a1b55 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Swapping old allocation on dict_keys(['fc35ddde-c15e-4ab8-bf77-a06ae0805b57']) held by migration ed7f0ba8-ef84-42aa-81f6-263f46fbef39 for instance [ 743.923566] env[63345]: DEBUG nova.scheduler.client.report [None req-2cac5929-5701-4cd3-b462-ff4ee05a1b55 
tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Overwriting current allocation {'allocations': {'fc35ddde-c15e-4ab8-bf77-a06ae0805b57': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 77}}, 'project_id': '80677040e91647d9afae9c71c48ed3f0', 'user_id': '17d05413415247e784585aaa367481eb', 'consumer_generation': 1} on consumer 27c6dc17-4ded-4fe7-8fba-265eae64fc32 {{(pid=63345) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2033}} [ 743.981496] env[63345]: DEBUG oslo_vmware.api [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Task: {'id': task-1016941, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.988830] env[63345]: DEBUG nova.compute.manager [req-0fad3e76-8b9b-431d-9b94-5fc1da850047 req-83c9d1c5-f888-4c29-ab3f-24ca286dff89 service nova] [instance: 64fcf837-1d9d-41b1-a2a1-3c16362932cf] Received event network-vif-deleted-6a41176d-7c10-4226-8332-eff6eea91574 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 744.023745] env[63345]: DEBUG oslo_concurrency.lockutils [req-68128d0f-2568-488d-be76-981962bbeba4 req-0a3c5479-9f37-4f1a-a47b-9e10ddcc852b service nova] Releasing lock "refresh_cache-3b0d115d-dad5-4881-a0e0-b98f555da533" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 744.040169] env[63345]: DEBUG oslo_concurrency.lockutils [None req-2cac5929-5701-4cd3-b462-ff4ee05a1b55 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Acquiring lock "refresh_cache-27c6dc17-4ded-4fe7-8fba-265eae64fc32" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 744.040169] env[63345]: DEBUG oslo_concurrency.lockutils [None req-2cac5929-5701-4cd3-b462-ff4ee05a1b55 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Acquired lock "refresh_cache-27c6dc17-4ded-4fe7-8fba-265eae64fc32" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 744.040169] env[63345]: DEBUG nova.network.neutron [None req-2cac5929-5701-4cd3-b462-ff4ee05a1b55 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 744.103183] env[63345]: DEBUG oslo_vmware.api [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52d87e26-898f-5082-024f-2443236174c8, 'name': SearchDatastore_Task, 'duration_secs': 0.044021} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.103445] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 744.103699] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 6cbe136b-5bf6-4f17-bcef-b712d850615f/6cbe136b-5bf6-4f17-bcef-b712d850615f.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 744.103953] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cae6f1f1-4d64-4818-8bda-970bd8f3d1ab {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.110724] env[63345]: DEBUG oslo_vmware.api [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Waiting for the task: (returnval){ [ 744.110724] env[63345]: value = "task-1016942" [ 744.110724] env[63345]: _type = "Task" [ 744.110724] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.121891] env[63345]: DEBUG oslo_vmware.api [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1016942, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.253344] env[63345]: DEBUG nova.network.neutron [req-e4a3b011-50eb-4f43-a5ca-1a6758de16c2 req-10425a67-47a2-408c-ac52-c13fbb59587a service nova] [instance: 4a59b565-571f-48ef-97bd-bed9853e2d8e] Updated VIF entry in instance network info cache for port 4ea770ff-4619-4df2-b09f-53b1fdc250e5. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 744.253773] env[63345]: DEBUG nova.network.neutron [req-e4a3b011-50eb-4f43-a5ca-1a6758de16c2 req-10425a67-47a2-408c-ac52-c13fbb59587a service nova] [instance: 4a59b565-571f-48ef-97bd-bed9853e2d8e] Updating instance_info_cache with network_info: [{"id": "4ea770ff-4619-4df2-b09f-53b1fdc250e5", "address": "fa:16:3e:1e:ce:1c", "network": {"id": "441f27c7-de99-494b-9db5-8e67e3c8e7b6", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-592603355-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8f0343855b6147f38b0cb3f2c72330e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ea770ff-46", "ovs_interfaceid": "4ea770ff-4619-4df2-b09f-53b1fdc250e5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 744.307081] env[63345]: INFO nova.compute.manager [-] [instance: 64fcf837-1d9d-41b1-a2a1-3c16362932cf] Took 1.34 seconds to deallocate network for instance. [ 744.399698] env[63345]: DEBUG nova.scheduler.client.report [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 744.481658] env[63345]: DEBUG oslo_vmware.api [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Task: {'id': task-1016941, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.634864] env[63345]: DEBUG oslo_vmware.api [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1016942, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.756600] env[63345]: DEBUG oslo_concurrency.lockutils [req-e4a3b011-50eb-4f43-a5ca-1a6758de16c2 req-10425a67-47a2-408c-ac52-c13fbb59587a service nova] Releasing lock "refresh_cache-4a59b565-571f-48ef-97bd-bed9853e2d8e" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 744.777188] env[63345]: DEBUG nova.network.neutron [None req-2cac5929-5701-4cd3-b462-ff4ee05a1b55 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Updating instance_info_cache with network_info: [{"id": "8c1bd582-6867-4cba-9522-0e03560fa3f7", "address": "fa:16:3e:3d:4f:aa", "network": {"id": "18285fd9-d154-415c-acbb-1494303e3b6c", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "5dc99cc64e6c4d83928b309253a8df8d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8b99a46-3e7f-4ef1-9e45-58e6cd17f210", "external-id": "nsx-vlan-transportzone-704", "segmentation_id": 704, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c1bd582-68", "ovs_interfaceid": "8c1bd582-6867-4cba-9522-0e03560fa3f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 744.814506] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a10b6693-c455-4fba-9874-34dcf1ff07be tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 744.904784] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.556s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 744.905408] env[63345]: DEBUG nova.compute.manager [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 778faa4f-4c5f-4ec2-b17b-5d7513c9c218] Start building networks asynchronously for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 744.908636] env[63345]: DEBUG oslo_concurrency.lockutils [None req-59c03649-375f-46fa-872a-0959a487545e tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.281s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 744.908941] env[63345]: DEBUG nova.objects.instance [None req-59c03649-375f-46fa-872a-0959a487545e tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Lazy-loading 'resources' on Instance uuid 0d5cb238-2d25-47b1-8ce6-15a20836dbfb {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 744.983447] env[63345]: DEBUG oslo_vmware.api [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Task: {'id': task-1016941, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.053206} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.983781] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore1] d3e99100-f13f-4019-9b5a-adaa65dacc5f/d3e99100-f13f-4019-9b5a-adaa65dacc5f.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 744.984058] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] [instance: d3e99100-f13f-4019-9b5a-adaa65dacc5f] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 744.984363] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-977e9984-c13e-4bcc-97a0-01a471797458 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.988477] env[63345]: DEBUG oslo_concurrency.lockutils [None req-de07fc32-5e9e-4308-a572-a207df5d5aa9 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Acquiring lock "3101726f-5b14-417e-bcf8-390ce1f9b467" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 744.988477] env[63345]: DEBUG oslo_concurrency.lockutils [None req-de07fc32-5e9e-4308-a572-a207df5d5aa9 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Lock "3101726f-5b14-417e-bcf8-390ce1f9b467" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 744.988477] env[63345]: DEBUG oslo_concurrency.lockutils [None req-de07fc32-5e9e-4308-a572-a207df5d5aa9 tempest-ImagesOneServerNegativeTestJSON-1510602040 
tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Acquiring lock "3101726f-5b14-417e-bcf8-390ce1f9b467-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 744.988477] env[63345]: DEBUG oslo_concurrency.lockutils [None req-de07fc32-5e9e-4308-a572-a207df5d5aa9 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Lock "3101726f-5b14-417e-bcf8-390ce1f9b467-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 744.988766] env[63345]: DEBUG oslo_concurrency.lockutils [None req-de07fc32-5e9e-4308-a572-a207df5d5aa9 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Lock "3101726f-5b14-417e-bcf8-390ce1f9b467-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 744.990779] env[63345]: INFO nova.compute.manager [None req-de07fc32-5e9e-4308-a572-a207df5d5aa9 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 3101726f-5b14-417e-bcf8-390ce1f9b467] Terminating instance [ 744.995586] env[63345]: DEBUG oslo_vmware.api [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Waiting for the task: (returnval){ [ 744.995586] env[63345]: value = "task-1016943" [ 744.995586] env[63345]: _type = "Task" [ 744.995586] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.007202] env[63345]: DEBUG oslo_vmware.api [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Task: {'id': task-1016943, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.128587] env[63345]: DEBUG oslo_vmware.api [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1016942, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.945315} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.129172] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 6cbe136b-5bf6-4f17-bcef-b712d850615f/6cbe136b-5bf6-4f17-bcef-b712d850615f.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 745.129172] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 6cbe136b-5bf6-4f17-bcef-b712d850615f] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 745.129390] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6fcd5b53-2add-400e-b1e8-510082ad5f28 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.137736] env[63345]: DEBUG oslo_vmware.api [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Waiting for the task: (returnval){ [ 745.137736] env[63345]: value = "task-1016944" [ 745.137736] env[63345]: _type = "Task" [ 745.137736] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.151883] env[63345]: DEBUG oslo_vmware.api [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1016944, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.279517] env[63345]: DEBUG oslo_concurrency.lockutils [None req-2cac5929-5701-4cd3-b462-ff4ee05a1b55 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Releasing lock "refresh_cache-27c6dc17-4ded-4fe7-8fba-265eae64fc32" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 745.279985] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cac5929-5701-4cd3-b462-ff4ee05a1b55 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 745.280281] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-25f6f2cd-af5a-44a0-b2b8-e130757e5052 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.287130] env[63345]: DEBUG oslo_vmware.api [None req-2cac5929-5701-4cd3-b462-ff4ee05a1b55 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Waiting for the task: (returnval){ [ 745.287130] env[63345]: value = "task-1016945" [ 745.287130] env[63345]: _type = "Task" [ 745.287130] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.295532] env[63345]: DEBUG oslo_vmware.api [None req-2cac5929-5701-4cd3-b462-ff4ee05a1b55 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Task: {'id': task-1016945, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.412459] env[63345]: DEBUG nova.compute.utils [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 745.421278] env[63345]: DEBUG nova.compute.manager [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 778faa4f-4c5f-4ec2-b17b-5d7513c9c218] Allocating IP information in the background. {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 745.421623] env[63345]: DEBUG nova.network.neutron [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 778faa4f-4c5f-4ec2-b17b-5d7513c9c218] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 745.481870] env[63345]: DEBUG nova.policy [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '27fc4f99c7f44b1ea421bd8f13de6e43', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '964cee117b3c4601b3afe82a8bb9c23e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 745.497825] env[63345]: DEBUG nova.compute.manager [None req-de07fc32-5e9e-4308-a572-a207df5d5aa9 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 3101726f-5b14-417e-bcf8-390ce1f9b467] Start destroying the instance on the hypervisor. 
{{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 745.498127] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-de07fc32-5e9e-4308-a572-a207df5d5aa9 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 3101726f-5b14-417e-bcf8-390ce1f9b467] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 745.499326] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62f494a3-8076-4145-82a8-a6e20c667415 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.514394] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-de07fc32-5e9e-4308-a572-a207df5d5aa9 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 3101726f-5b14-417e-bcf8-390ce1f9b467] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 745.514777] env[63345]: DEBUG oslo_vmware.api [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Task: {'id': task-1016943, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.095111} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.514973] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9695434d-4de7-4610-bf61-d46c85a737c7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.516941] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] [instance: d3e99100-f13f-4019-9b5a-adaa65dacc5f] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 745.517178] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34fdaa14-4c7c-49c8-ab7b-8265a2c0034b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.548231] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] [instance: d3e99100-f13f-4019-9b5a-adaa65dacc5f] Reconfiguring VM instance instance-00000033 to attach disk [datastore1] d3e99100-f13f-4019-9b5a-adaa65dacc5f/d3e99100-f13f-4019-9b5a-adaa65dacc5f.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 745.553334] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b2e263d0-e118-4ab7-bf5c-30c25f92b31b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.569375] env[63345]: DEBUG oslo_vmware.api [None req-de07fc32-5e9e-4308-a572-a207df5d5aa9 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Waiting for the task: (returnval){ [ 745.569375] env[63345]: value = "task-1016946" [ 745.569375] env[63345]: _type = "Task" [ 745.569375] env[63345]: } to 
complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.575653] env[63345]: DEBUG oslo_vmware.api [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Waiting for the task: (returnval){ [ 745.575653] env[63345]: value = "task-1016947" [ 745.575653] env[63345]: _type = "Task" [ 745.575653] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.578884] env[63345]: DEBUG oslo_vmware.api [None req-de07fc32-5e9e-4308-a572-a207df5d5aa9 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': task-1016946, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.592015] env[63345]: DEBUG oslo_vmware.api [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Task: {'id': task-1016947, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.650459] env[63345]: DEBUG oslo_vmware.api [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1016944, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.235679} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.651190] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 6cbe136b-5bf6-4f17-bcef-b712d850615f] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 745.653585] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8191d70-1f10-4fa1-8480-20c86ca786d4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.656861] env[63345]: DEBUG nova.compute.manager [req-136c86a5-2201-48d3-af4a-82454de11aff req-5f043b5a-1a6f-4e1f-9aa8-5d5c8c2639c0 service nova] [instance: 4a59b565-571f-48ef-97bd-bed9853e2d8e] Received event network-changed-4ea770ff-4619-4df2-b09f-53b1fdc250e5 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 745.657148] env[63345]: DEBUG nova.compute.manager [req-136c86a5-2201-48d3-af4a-82454de11aff req-5f043b5a-1a6f-4e1f-9aa8-5d5c8c2639c0 service nova] [instance: 4a59b565-571f-48ef-97bd-bed9853e2d8e] Refreshing instance network info cache due to event network-changed-4ea770ff-4619-4df2-b09f-53b1fdc250e5. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 745.657331] env[63345]: DEBUG oslo_concurrency.lockutils [req-136c86a5-2201-48d3-af4a-82454de11aff req-5f043b5a-1a6f-4e1f-9aa8-5d5c8c2639c0 service nova] Acquiring lock "refresh_cache-4a59b565-571f-48ef-97bd-bed9853e2d8e" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 745.657413] env[63345]: DEBUG oslo_concurrency.lockutils [req-136c86a5-2201-48d3-af4a-82454de11aff req-5f043b5a-1a6f-4e1f-9aa8-5d5c8c2639c0 service nova] Acquired lock "refresh_cache-4a59b565-571f-48ef-97bd-bed9853e2d8e" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 745.657653] env[63345]: DEBUG nova.network.neutron [req-136c86a5-2201-48d3-af4a-82454de11aff req-5f043b5a-1a6f-4e1f-9aa8-5d5c8c2639c0 service nova] [instance: 4a59b565-571f-48ef-97bd-bed9853e2d8e] Refreshing network info cache for port 4ea770ff-4619-4df2-b09f-53b1fdc250e5 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 745.685587] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 6cbe136b-5bf6-4f17-bcef-b712d850615f] Reconfiguring VM instance instance-00000034 to attach disk [datastore2] 6cbe136b-5bf6-4f17-bcef-b712d850615f/6cbe136b-5bf6-4f17-bcef-b712d850615f.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 745.688182] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-70f12b29-cf8e-4f7e-9637-23a9eadb108c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.710590] env[63345]: DEBUG oslo_vmware.api [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Waiting for the task: (returnval){ [ 745.710590] env[63345]: value = "task-1016948" [ 745.710590] env[63345]: _type = "Task" [ 745.710590] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.720022] env[63345]: DEBUG oslo_vmware.api [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1016948, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.795407] env[63345]: DEBUG nova.network.neutron [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 778faa4f-4c5f-4ec2-b17b-5d7513c9c218] Successfully created port: cdecc5d9-4e4d-421b-80ac-a8ae91e31e7e {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 745.800798] env[63345]: DEBUG oslo_vmware.api [None req-2cac5929-5701-4cd3-b462-ff4ee05a1b55 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Task: {'id': task-1016945, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.922195] env[63345]: DEBUG nova.compute.manager [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 778faa4f-4c5f-4ec2-b17b-5d7513c9c218] Start building block device mappings for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 746.003731] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3274bd2-9c6d-4a81-ae99-07bbe254073a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.012102] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b6b9f2c-8251-4271-b2ee-5f0df2579dc9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.045150] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5f33cbc-ec3d-4e46-a9b4-38bdf3fabb96 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.053445] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28c0641a-8da6-464d-ab19-46b214372b11 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.067065] env[63345]: DEBUG nova.compute.provider_tree [None req-59c03649-375f-46fa-872a-0959a487545e tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 746.079317] env[63345]: DEBUG oslo_vmware.api [None req-de07fc32-5e9e-4308-a572-a207df5d5aa9 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': task-1016946, 'name': PowerOffVM_Task, 'duration_secs': 0.388226} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.079680] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-de07fc32-5e9e-4308-a572-a207df5d5aa9 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 3101726f-5b14-417e-bcf8-390ce1f9b467] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 746.079927] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-de07fc32-5e9e-4308-a572-a207df5d5aa9 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 3101726f-5b14-417e-bcf8-390ce1f9b467] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 746.084091] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3166bedd-d5d8-4d42-adae-621ee5265cb1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.090648] env[63345]: DEBUG oslo_vmware.api [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Task: {'id': task-1016947, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.221421] env[63345]: DEBUG oslo_vmware.api [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1016948, 'name': ReconfigVM_Task, 'duration_secs': 0.327945} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.224637] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 6cbe136b-5bf6-4f17-bcef-b712d850615f] Reconfigured VM instance instance-00000034 to attach disk [datastore2] 6cbe136b-5bf6-4f17-bcef-b712d850615f/6cbe136b-5bf6-4f17-bcef-b712d850615f.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 746.225482] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-de7bca09-c70f-4ffb-95f9-2e75be065f3c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.232713] env[63345]: DEBUG oslo_vmware.api [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Waiting for the task: (returnval){ [ 746.232713] env[63345]: value = "task-1016950" [ 746.232713] env[63345]: _type = "Task" [ 746.232713] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.243973] env[63345]: DEBUG oslo_vmware.api [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1016950, 'name': Rename_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.298810] env[63345]: DEBUG oslo_vmware.api [None req-2cac5929-5701-4cd3-b462-ff4ee05a1b55 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Task: {'id': task-1016945, 'name': PowerOffVM_Task, 'duration_secs': 0.517649} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.299396] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cac5929-5701-4cd3-b462-ff4ee05a1b55 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 746.300155] env[63345]: DEBUG nova.virt.hardware [None req-2cac5929-5701-4cd3-b462-ff4ee05a1b55 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='e0f196a9-2434-4e97-8d5f-115ba2c65179',id=36,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-506807631',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 746.300556] env[63345]: DEBUG nova.virt.hardware [None req-2cac5929-5701-4cd3-b462-ff4ee05a1b55 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 746.300848] env[63345]: DEBUG nova.virt.hardware [None req-2cac5929-5701-4cd3-b462-ff4ee05a1b55 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 746.301160] env[63345]: DEBUG nova.virt.hardware [None req-2cac5929-5701-4cd3-b462-ff4ee05a1b55 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 746.301433] env[63345]: DEBUG nova.virt.hardware [None req-2cac5929-5701-4cd3-b462-ff4ee05a1b55 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 746.301695] env[63345]: DEBUG nova.virt.hardware [None req-2cac5929-5701-4cd3-b462-ff4ee05a1b55 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 746.302020] env[63345]: DEBUG nova.virt.hardware [None req-2cac5929-5701-4cd3-b462-ff4ee05a1b55 tempest-MigrationsAdminTest-1586795887 
tempest-MigrationsAdminTest-1586795887-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 746.302308] env[63345]: DEBUG nova.virt.hardware [None req-2cac5929-5701-4cd3-b462-ff4ee05a1b55 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 746.302597] env[63345]: DEBUG nova.virt.hardware [None req-2cac5929-5701-4cd3-b462-ff4ee05a1b55 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 746.302913] env[63345]: DEBUG nova.virt.hardware [None req-2cac5929-5701-4cd3-b462-ff4ee05a1b55 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 746.303229] env[63345]: DEBUG nova.virt.hardware [None req-2cac5929-5701-4cd3-b462-ff4ee05a1b55 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 746.308860] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d7fddad7-8fbd-455e-8320-3edeb0552ce0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.321438] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-de07fc32-5e9e-4308-a572-a207df5d5aa9 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 3101726f-5b14-417e-bcf8-390ce1f9b467] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 746.321591] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-de07fc32-5e9e-4308-a572-a207df5d5aa9 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 3101726f-5b14-417e-bcf8-390ce1f9b467] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 746.321746] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-de07fc32-5e9e-4308-a572-a207df5d5aa9 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Deleting the datastore file [datastore2] 3101726f-5b14-417e-bcf8-390ce1f9b467 {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 746.324673] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-56b2e5c8-ac7c-4a55-b86b-5abaf1758b06 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.329279] env[63345]: DEBUG oslo_vmware.api [None req-2cac5929-5701-4cd3-b462-ff4ee05a1b55 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Waiting for the task: (returnval){ [ 746.329279] env[63345]: value = 
"task-1016951" [ 746.329279] env[63345]: _type = "Task" [ 746.329279] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.333525] env[63345]: DEBUG oslo_vmware.api [None req-de07fc32-5e9e-4308-a572-a207df5d5aa9 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Waiting for the task: (returnval){ [ 746.333525] env[63345]: value = "task-1016952" [ 746.333525] env[63345]: _type = "Task" [ 746.333525] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.341513] env[63345]: DEBUG oslo_vmware.api [None req-2cac5929-5701-4cd3-b462-ff4ee05a1b55 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Task: {'id': task-1016951, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.347592] env[63345]: DEBUG oslo_vmware.api [None req-de07fc32-5e9e-4308-a572-a207df5d5aa9 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': task-1016952, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.570809] env[63345]: DEBUG nova.scheduler.client.report [None req-59c03649-375f-46fa-872a-0959a487545e tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 746.592926] env[63345]: DEBUG oslo_vmware.api [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Task: {'id': task-1016947, 'name': ReconfigVM_Task, 'duration_secs': 0.7447} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.593646] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] [instance: d3e99100-f13f-4019-9b5a-adaa65dacc5f] Reconfigured VM instance instance-00000033 to attach disk [datastore1] d3e99100-f13f-4019-9b5a-adaa65dacc5f/d3e99100-f13f-4019-9b5a-adaa65dacc5f.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 746.594320] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6ff51a87-11e0-47ea-b0d9-63ba3a3a9e08 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.604578] env[63345]: DEBUG oslo_vmware.api [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Waiting for the task: (returnval){ [ 746.604578] env[63345]: value = "task-1016953" [ 746.604578] env[63345]: _type = "Task" [ 746.604578] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.613540] env[63345]: DEBUG oslo_vmware.api [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Task: {'id': task-1016953, 'name': Rename_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.616718] env[63345]: DEBUG nova.network.neutron [req-136c86a5-2201-48d3-af4a-82454de11aff req-5f043b5a-1a6f-4e1f-9aa8-5d5c8c2639c0 service nova] [instance: 4a59b565-571f-48ef-97bd-bed9853e2d8e] Updated VIF entry in instance network info cache for port 4ea770ff-4619-4df2-b09f-53b1fdc250e5. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 746.616718] env[63345]: DEBUG nova.network.neutron [req-136c86a5-2201-48d3-af4a-82454de11aff req-5f043b5a-1a6f-4e1f-9aa8-5d5c8c2639c0 service nova] [instance: 4a59b565-571f-48ef-97bd-bed9853e2d8e] Updating instance_info_cache with network_info: [{"id": "4ea770ff-4619-4df2-b09f-53b1fdc250e5", "address": "fa:16:3e:1e:ce:1c", "network": {"id": "441f27c7-de99-494b-9db5-8e67e3c8e7b6", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-592603355-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8f0343855b6147f38b0cb3f2c72330e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ea770ff-46", "ovs_interfaceid": "4ea770ff-4619-4df2-b09f-53b1fdc250e5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 746.744150] env[63345]: DEBUG oslo_vmware.api [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1016950, 'name': Rename_Task, 'duration_secs': 0.329497} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.744419] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 6cbe136b-5bf6-4f17-bcef-b712d850615f] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 746.744756] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3c7dcd68-c087-402c-be3d-f9146558b2f2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.751407] env[63345]: DEBUG oslo_vmware.api [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Waiting for the task: (returnval){ [ 746.751407] env[63345]: value = "task-1016954" [ 746.751407] env[63345]: _type = "Task" [ 746.751407] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.760252] env[63345]: DEBUG oslo_vmware.api [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1016954, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.841630] env[63345]: DEBUG oslo_vmware.api [None req-2cac5929-5701-4cd3-b462-ff4ee05a1b55 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Task: {'id': task-1016951, 'name': ReconfigVM_Task, 'duration_secs': 0.337275} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.845019] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af13e08d-cc4a-4ef7-84f5-e8936367854e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.848600] env[63345]: DEBUG oslo_vmware.api [None req-de07fc32-5e9e-4308-a572-a207df5d5aa9 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': task-1016952, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.505752} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.849399] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-de07fc32-5e9e-4308-a572-a207df5d5aa9 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 746.849777] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-de07fc32-5e9e-4308-a572-a207df5d5aa9 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 3101726f-5b14-417e-bcf8-390ce1f9b467] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 746.850133] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-de07fc32-5e9e-4308-a572-a207df5d5aa9 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 3101726f-5b14-417e-bcf8-390ce1f9b467] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 746.853024] env[63345]: INFO nova.compute.manager [None req-de07fc32-5e9e-4308-a572-a207df5d5aa9 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 3101726f-5b14-417e-bcf8-390ce1f9b467] Took 1.35 seconds to destroy the instance on the hypervisor. [ 746.853024] env[63345]: DEBUG oslo.service.loopingcall [None req-de07fc32-5e9e-4308-a572-a207df5d5aa9 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 746.866248] env[63345]: DEBUG nova.compute.manager [-] [instance: 3101726f-5b14-417e-bcf8-390ce1f9b467] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 746.866526] env[63345]: DEBUG nova.network.neutron [-] [instance: 3101726f-5b14-417e-bcf8-390ce1f9b467] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 746.872175] env[63345]: DEBUG nova.virt.hardware [None req-2cac5929-5701-4cd3-b462-ff4ee05a1b55 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:34:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='e0f196a9-2434-4e97-8d5f-115ba2c65179',id=36,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-506807631',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 746.872175] env[63345]: DEBUG nova.virt.hardware [None req-2cac5929-5701-4cd3-b462-ff4ee05a1b55 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 746.872175] env[63345]: DEBUG nova.virt.hardware [None req-2cac5929-5701-4cd3-b462-ff4ee05a1b55 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 746.872175] env[63345]: DEBUG nova.virt.hardware [None req-2cac5929-5701-4cd3-b462-ff4ee05a1b55 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 746.872323] env[63345]: DEBUG nova.virt.hardware [None req-2cac5929-5701-4cd3-b462-ff4ee05a1b55 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 746.872323] env[63345]: DEBUG nova.virt.hardware [None req-2cac5929-5701-4cd3-b462-ff4ee05a1b55 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 746.872323] env[63345]: DEBUG nova.virt.hardware [None req-2cac5929-5701-4cd3-b462-ff4ee05a1b55 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 746.872323] env[63345]: DEBUG nova.virt.hardware [None 
req-2cac5929-5701-4cd3-b462-ff4ee05a1b55 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 746.872323] env[63345]: DEBUG nova.virt.hardware [None req-2cac5929-5701-4cd3-b462-ff4ee05a1b55 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 746.872469] env[63345]: DEBUG nova.virt.hardware [None req-2cac5929-5701-4cd3-b462-ff4ee05a1b55 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 746.872469] env[63345]: DEBUG nova.virt.hardware [None req-2cac5929-5701-4cd3-b462-ff4ee05a1b55 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 746.872469] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dabc1b21-9de2-48ce-8b1a-7c05455a2707 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.878281] env[63345]: DEBUG oslo_vmware.api [None req-2cac5929-5701-4cd3-b462-ff4ee05a1b55 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Waiting for the task: (returnval){ [ 746.878281] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52740d99-d249-89ff-1456-5a78c9d5eef9" [ 746.878281] env[63345]: _type = "Task" [ 746.878281] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.886737] env[63345]: DEBUG oslo_vmware.api [None req-2cac5929-5701-4cd3-b462-ff4ee05a1b55 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52740d99-d249-89ff-1456-5a78c9d5eef9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.936814] env[63345]: DEBUG nova.compute.manager [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 778faa4f-4c5f-4ec2-b17b-5d7513c9c218] Start spawning the instance on the hypervisor. 
{{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 746.970019] env[63345]: DEBUG nova.virt.hardware [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 746.970019] env[63345]: DEBUG nova.virt.hardware [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 746.970594] env[63345]: DEBUG nova.virt.hardware [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 746.972026] env[63345]: DEBUG nova.virt.hardware [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 746.972026] env[63345]: DEBUG nova.virt.hardware [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 746.972026] env[63345]: DEBUG nova.virt.hardware [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 746.972026] env[63345]: DEBUG nova.virt.hardware [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 746.972026] env[63345]: DEBUG nova.virt.hardware [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 746.972433] 
env[63345]: DEBUG nova.virt.hardware [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 746.972710] env[63345]: DEBUG nova.virt.hardware [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 746.973063] env[63345]: DEBUG nova.virt.hardware [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 746.974118] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23deae29-0ad4-4eca-8686-97b658c5bb82 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.987802] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f337e959-b6a9-4b04-aa24-9f0be4f9cbc0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.080947] env[63345]: DEBUG oslo_concurrency.lockutils [None req-59c03649-375f-46fa-872a-0959a487545e tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.173s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 747.085472] env[63345]: DEBUG oslo_concurrency.lockutils [None req-20fee683-8a2f-46a5-b495-3d50cf4c60ce tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.486s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 747.087569] env[63345]: DEBUG nova.objects.instance [None req-20fee683-8a2f-46a5-b495-3d50cf4c60ce tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Lazy-loading 'resources' on Instance uuid abc81fa5-78a9-48b1-a49e-2faffddf2411 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 747.118496] env[63345]: INFO nova.scheduler.client.report [None req-59c03649-375f-46fa-872a-0959a487545e tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Deleted allocations for instance 0d5cb238-2d25-47b1-8ce6-15a20836dbfb [ 747.125924] env[63345]: DEBUG oslo_concurrency.lockutils [req-136c86a5-2201-48d3-af4a-82454de11aff req-5f043b5a-1a6f-4e1f-9aa8-5d5c8c2639c0 service nova] Releasing lock "refresh_cache-4a59b565-571f-48ef-97bd-bed9853e2d8e" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 747.134229] env[63345]: DEBUG oslo_vmware.api [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 
tempest-ServerGroupTestJSON-1472670708-project-member] Task: {'id': task-1016953, 'name': Rename_Task, 'duration_secs': 0.317911} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.134229] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] [instance: d3e99100-f13f-4019-9b5a-adaa65dacc5f] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 747.134229] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a7120f11-be40-4e20-b87b-e5cb583046ce {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.141808] env[63345]: DEBUG oslo_vmware.api [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Waiting for the task: (returnval){ [ 747.141808] env[63345]: value = "task-1016955" [ 747.141808] env[63345]: _type = "Task" [ 747.141808] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.151735] env[63345]: DEBUG oslo_vmware.api [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Task: {'id': task-1016955, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.261743] env[63345]: DEBUG oslo_vmware.api [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1016954, 'name': PowerOnVM_Task, 'duration_secs': 0.471575} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.262230] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 6cbe136b-5bf6-4f17-bcef-b712d850615f] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 747.262572] env[63345]: INFO nova.compute.manager [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 6cbe136b-5bf6-4f17-bcef-b712d850615f] Took 8.19 seconds to spawn the instance on the hypervisor. 
[ 747.262925] env[63345]: DEBUG nova.compute.manager [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 6cbe136b-5bf6-4f17-bcef-b712d850615f] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 747.263874] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7695742b-3458-45d1-8cdb-3310c9fbd9ab {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.396899] env[63345]: DEBUG oslo_vmware.api [None req-2cac5929-5701-4cd3-b462-ff4ee05a1b55 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52740d99-d249-89ff-1456-5a78c9d5eef9, 'name': SearchDatastore_Task, 'duration_secs': 0.007296} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.397671] env[63345]: DEBUG nova.network.neutron [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 778faa4f-4c5f-4ec2-b17b-5d7513c9c218] Successfully updated port: cdecc5d9-4e4d-421b-80ac-a8ae91e31e7e {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 747.404579] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-2cac5929-5701-4cd3-b462-ff4ee05a1b55 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Reconfiguring VM instance instance-0000001f to detach disk 2000 {{(pid=63345) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 747.406021] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ed5791d7-b942-4250-9653-1f15193027d8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.420250] env[63345]: DEBUG nova.compute.manager [req-e9f0fbad-ef81-4876-8b54-a6f7ed36b49e req-87b6ee78-cceb-4c4f-a75c-34a579834889 service nova] [instance: 778faa4f-4c5f-4ec2-b17b-5d7513c9c218] Received event network-vif-plugged-cdecc5d9-4e4d-421b-80ac-a8ae91e31e7e {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 747.420499] env[63345]: DEBUG oslo_concurrency.lockutils [req-e9f0fbad-ef81-4876-8b54-a6f7ed36b49e req-87b6ee78-cceb-4c4f-a75c-34a579834889 service nova] Acquiring lock "778faa4f-4c5f-4ec2-b17b-5d7513c9c218-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 747.420714] env[63345]: DEBUG oslo_concurrency.lockutils [req-e9f0fbad-ef81-4876-8b54-a6f7ed36b49e req-87b6ee78-cceb-4c4f-a75c-34a579834889 service nova] Lock "778faa4f-4c5f-4ec2-b17b-5d7513c9c218-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 747.420882] env[63345]: DEBUG oslo_concurrency.lockutils [req-e9f0fbad-ef81-4876-8b54-a6f7ed36b49e req-87b6ee78-cceb-4c4f-a75c-34a579834889 service nova] Lock "778faa4f-4c5f-4ec2-b17b-5d7513c9c218-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 747.421057] env[63345]: DEBUG nova.compute.manager [req-e9f0fbad-ef81-4876-8b54-a6f7ed36b49e req-87b6ee78-cceb-4c4f-a75c-34a579834889 service nova] [instance: 778faa4f-4c5f-4ec2-b17b-5d7513c9c218] No waiting events found dispatching network-vif-plugged-cdecc5d9-4e4d-421b-80ac-a8ae91e31e7e {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 747.421227] env[63345]: WARNING nova.compute.manager [req-e9f0fbad-ef81-4876-8b54-a6f7ed36b49e req-87b6ee78-cceb-4c4f-a75c-34a579834889 service nova] [instance: 778faa4f-4c5f-4ec2-b17b-5d7513c9c218] Received unexpected event network-vif-plugged-cdecc5d9-4e4d-421b-80ac-a8ae91e31e7e for instance with vm_state building and task_state spawning. [ 747.428225] env[63345]: DEBUG oslo_vmware.api [None req-2cac5929-5701-4cd3-b462-ff4ee05a1b55 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Waiting for the task: (returnval){ [ 747.428225] env[63345]: value = "task-1016956" [ 747.428225] env[63345]: _type = "Task" [ 747.428225] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.436729] env[63345]: DEBUG oslo_vmware.api [None req-2cac5929-5701-4cd3-b462-ff4ee05a1b55 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Task: {'id': task-1016956, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.630182] env[63345]: DEBUG oslo_concurrency.lockutils [None req-59c03649-375f-46fa-872a-0959a487545e tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Lock "0d5cb238-2d25-47b1-8ce6-15a20836dbfb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.220s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 747.657276] env[63345]: DEBUG oslo_vmware.api [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Task: {'id': task-1016955, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.783454] env[63345]: INFO nova.compute.manager [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 6cbe136b-5bf6-4f17-bcef-b712d850615f] Took 30.93 seconds to build instance. 
[ 747.847188] env[63345]: DEBUG nova.network.neutron [-] [instance: 3101726f-5b14-417e-bcf8-390ce1f9b467] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 747.908513] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquiring lock "refresh_cache-778faa4f-4c5f-4ec2-b17b-5d7513c9c218" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 747.908513] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquired lock "refresh_cache-778faa4f-4c5f-4ec2-b17b-5d7513c9c218" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 747.908513] env[63345]: DEBUG nova.network.neutron [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 778faa4f-4c5f-4ec2-b17b-5d7513c9c218] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 747.939675] env[63345]: DEBUG oslo_vmware.api [None req-2cac5929-5701-4cd3-b462-ff4ee05a1b55 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Task: {'id': task-1016956, 'name': ReconfigVM_Task, 'duration_secs': 0.185422} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.939918] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-2cac5929-5701-4cd3-b462-ff4ee05a1b55 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Reconfigured VM instance instance-0000001f to detach disk 2000 {{(pid=63345) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 747.940735] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c009e520-b979-4193-98e7-7220ada09c4a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.967392] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-2cac5929-5701-4cd3-b462-ff4ee05a1b55 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Reconfiguring VM instance instance-0000001f to attach disk [datastore2] 27c6dc17-4ded-4fe7-8fba-265eae64fc32/27c6dc17-4ded-4fe7-8fba-265eae64fc32.vmdk or device None with type thin {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 747.968206] env[63345]: DEBUG nova.compute.manager [req-f9b8d0e6-7e94-4927-9564-180c321c53d9 req-3714b54f-a087-4825-bbe0-56f8646b0461 service nova] [instance: 3101726f-5b14-417e-bcf8-390ce1f9b467] Received event network-vif-deleted-2eec7fb7-14bd-4975-ac39-8b00f81ac502 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 747.971106] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ee4a5fa5-b7da-4252-ba9d-a3fab34c8c11 {{(pid=63345) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.990405] env[63345]: DEBUG oslo_vmware.api [None req-2cac5929-5701-4cd3-b462-ff4ee05a1b55 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Waiting for the task: (returnval){ [ 747.990405] env[63345]: value = "task-1016957" [ 747.990405] env[63345]: _type = "Task" [ 747.990405] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.999256] env[63345]: DEBUG oslo_vmware.api [None req-2cac5929-5701-4cd3-b462-ff4ee05a1b55 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Task: {'id': task-1016957, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.055930] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39020af8-0b76-4058-8e08-2cff83b7db84 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.068225] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8387f2a7-4a1f-4cd3-8760-3a5663adedce {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.101059] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a11dcc9-c408-46b3-87a8-a54736d72184 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.108807] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be9be443-e375-42b8-a40a-ce5e8e579691 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.122016] env[63345]: DEBUG nova.compute.provider_tree [None req-20fee683-8a2f-46a5-b495-3d50cf4c60ce tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 748.156035] env[63345]: DEBUG oslo_vmware.api [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Task: {'id': task-1016955, 'name': PowerOnVM_Task, 'duration_secs': 0.58828} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.156035] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] [instance: d3e99100-f13f-4019-9b5a-adaa65dacc5f] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 748.156035] env[63345]: INFO nova.compute.manager [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] [instance: d3e99100-f13f-4019-9b5a-adaa65dacc5f] Took 10.62 seconds to spawn the instance on the hypervisor. 
[ 748.156035] env[63345]: DEBUG nova.compute.manager [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] [instance: d3e99100-f13f-4019-9b5a-adaa65dacc5f] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 748.156035] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47855b25-2dd3-4d00-8b6c-900cdb091790 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.286133] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9af04613-bd8e-47de-beb2-b6d0ded6153a tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Lock "6cbe136b-5bf6-4f17-bcef-b712d850615f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 112.109s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 748.353570] env[63345]: INFO nova.compute.manager [-] [instance: 3101726f-5b14-417e-bcf8-390ce1f9b467] Took 1.49 seconds to deallocate network for instance. [ 748.444934] env[63345]: DEBUG nova.network.neutron [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 778faa4f-4c5f-4ec2-b17b-5d7513c9c218] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 748.500910] env[63345]: DEBUG oslo_vmware.api [None req-2cac5929-5701-4cd3-b462-ff4ee05a1b55 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Task: {'id': task-1016957, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.613588] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Acquiring lock "fe3e2b2a-9583-482e-b69b-6c130801d7db" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 748.614089] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Lock "fe3e2b2a-9583-482e-b69b-6c130801d7db" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 748.628122] env[63345]: DEBUG nova.scheduler.client.report [None req-20fee683-8a2f-46a5-b495-3d50cf4c60ce tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 748.659158] env[63345]: DEBUG nova.network.neutron [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 778faa4f-4c5f-4ec2-b17b-5d7513c9c218] Updating instance_info_cache with network_info: [{"id": "cdecc5d9-4e4d-421b-80ac-a8ae91e31e7e", "address": "fa:16:3e:e5:8a:ad", "network": {"id": "80bb8388-e130-46af-a4fc-1daea51d1bf5", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1343573007-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "964cee117b3c4601b3afe82a8bb9c23e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ddfb706a-add1-4e16-9ac4-d20b16a1df6d", "external-id": "nsx-vlan-transportzone-820", "segmentation_id": 820, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcdecc5d9-4e", "ovs_interfaceid": "cdecc5d9-4e4d-421b-80ac-a8ae91e31e7e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 748.672803] env[63345]: INFO nova.compute.manager [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 
tempest-ServerGroupTestJSON-1472670708-project-member] [instance: d3e99100-f13f-4019-9b5a-adaa65dacc5f] Took 34.58 seconds to build instance. [ 748.758799] env[63345]: DEBUG oslo_concurrency.lockutils [None req-e6a3c7b6-fdbf-4dc5-8bf4-9de718283653 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Acquiring lock "e6bc8cb9-2f1a-49cb-974d-ea9a211126ee" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 748.758958] env[63345]: DEBUG oslo_concurrency.lockutils [None req-e6a3c7b6-fdbf-4dc5-8bf4-9de718283653 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Lock "e6bc8cb9-2f1a-49cb-974d-ea9a211126ee" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 748.759195] env[63345]: DEBUG oslo_concurrency.lockutils [None req-e6a3c7b6-fdbf-4dc5-8bf4-9de718283653 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Acquiring lock "e6bc8cb9-2f1a-49cb-974d-ea9a211126ee-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 748.759378] env[63345]: DEBUG oslo_concurrency.lockutils [None req-e6a3c7b6-fdbf-4dc5-8bf4-9de718283653 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Lock "e6bc8cb9-2f1a-49cb-974d-ea9a211126ee-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 748.759555] env[63345]: DEBUG oslo_concurrency.lockutils [None req-e6a3c7b6-fdbf-4dc5-8bf4-9de718283653 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Lock "e6bc8cb9-2f1a-49cb-974d-ea9a211126ee-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 748.762228] env[63345]: INFO nova.compute.manager [None req-e6a3c7b6-fdbf-4dc5-8bf4-9de718283653 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: e6bc8cb9-2f1a-49cb-974d-ea9a211126ee] Terminating instance [ 748.788501] env[63345]: DEBUG nova.compute.manager [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Starting instance... 
{{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 748.860869] env[63345]: DEBUG oslo_concurrency.lockutils [None req-de07fc32-5e9e-4308-a572-a207df5d5aa9 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 749.001524] env[63345]: DEBUG oslo_vmware.api [None req-2cac5929-5701-4cd3-b462-ff4ee05a1b55 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Task: {'id': task-1016957, 'name': ReconfigVM_Task, 'duration_secs': 0.690453} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.001804] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-2cac5929-5701-4cd3-b462-ff4ee05a1b55 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Reconfigured VM instance instance-0000001f to attach disk [datastore2] 27c6dc17-4ded-4fe7-8fba-265eae64fc32/27c6dc17-4ded-4fe7-8fba-265eae64fc32.vmdk or device None with type thin {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 749.002629] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86e3dc77-672e-497b-a931-9ecb59ab7f70 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.020936] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acea914a-d136-4e4d-9546-7654669359fa {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.040231] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97d3bd3b-eb68-4f76-bd83-9912f560c6a5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.058101] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d615d44-ddb6-43f7-bcbd-ae42aa5e5954 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.065141] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cac5929-5701-4cd3-b462-ff4ee05a1b55 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 749.065389] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-15ab642c-88bd-40f9-ad14-36e27ee6f7ae {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.072259] env[63345]: DEBUG oslo_vmware.api [None req-2cac5929-5701-4cd3-b462-ff4ee05a1b55 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Waiting for the task: (returnval){ [ 749.072259] env[63345]: value = "task-1016958" [ 749.072259] env[63345]: _type = "Task" [ 749.072259] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.082402] env[63345]: DEBUG oslo_vmware.api [None req-2cac5929-5701-4cd3-b462-ff4ee05a1b55 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Task: {'id': task-1016958, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.131370] env[63345]: DEBUG oslo_concurrency.lockutils [None req-20fee683-8a2f-46a5-b495-3d50cf4c60ce tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.046s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 749.133934] env[63345]: DEBUG oslo_concurrency.lockutils [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.430s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 749.135492] env[63345]: INFO nova.compute.claims [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] [instance: 93112cc1-f9a1-4188-9555-bddf483426a1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 749.161889] env[63345]: INFO nova.scheduler.client.report [None req-20fee683-8a2f-46a5-b495-3d50cf4c60ce tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Deleted allocations for instance abc81fa5-78a9-48b1-a49e-2faffddf2411 [ 749.163055] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Releasing lock "refresh_cache-778faa4f-4c5f-4ec2-b17b-5d7513c9c218" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 749.163055] env[63345]: DEBUG nova.compute.manager [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 778faa4f-4c5f-4ec2-b17b-5d7513c9c218] Instance network_info: |[{"id": "cdecc5d9-4e4d-421b-80ac-a8ae91e31e7e", "address": "fa:16:3e:e5:8a:ad", "network": {"id": "80bb8388-e130-46af-a4fc-1daea51d1bf5", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1343573007-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "964cee117b3c4601b3afe82a8bb9c23e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ddfb706a-add1-4e16-9ac4-d20b16a1df6d", "external-id": "nsx-vlan-transportzone-820", "segmentation_id": 820, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcdecc5d9-4e", "ovs_interfaceid": 
"cdecc5d9-4e4d-421b-80ac-a8ae91e31e7e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 749.165727] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 778faa4f-4c5f-4ec2-b17b-5d7513c9c218] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e5:8a:ad', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ddfb706a-add1-4e16-9ac4-d20b16a1df6d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cdecc5d9-4e4d-421b-80ac-a8ae91e31e7e', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 749.173171] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Creating folder: Project (964cee117b3c4601b3afe82a8bb9c23e). Parent ref: group-v225918. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 749.173752] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-adda781d-172c-4e35-a298-d181043783a8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.178330] env[63345]: DEBUG oslo_concurrency.lockutils [None req-c6323670-b953-480f-8208-6fc5a2ff076f tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Lock "d3e99100-f13f-4019-9b5a-adaa65dacc5f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 114.117s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 749.189438] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Created folder: Project (964cee117b3c4601b3afe82a8bb9c23e) in parent group-v225918. [ 749.189723] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Creating folder: Instances. Parent ref: group-v226012. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 749.190141] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3e0b32ae-3ac6-403e-8880-fdaf1dc23942 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.199574] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Created folder: Instances in parent group-v226012. [ 749.199790] env[63345]: DEBUG oslo.service.loopingcall [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 749.201197] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 778faa4f-4c5f-4ec2-b17b-5d7513c9c218] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 749.201580] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c2cf1539-f5e4-4f5c-9cdc-7831fe9fbed1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.226765] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 749.226765] env[63345]: value = "task-1016961" [ 749.226765] env[63345]: _type = "Task" [ 749.226765] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.237448] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016961, 'name': CreateVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.266515] env[63345]: DEBUG nova.compute.manager [None req-e6a3c7b6-fdbf-4dc5-8bf4-9de718283653 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: e6bc8cb9-2f1a-49cb-974d-ea9a211126ee] Start destroying the instance on the hypervisor. {{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 749.266756] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-e6a3c7b6-fdbf-4dc5-8bf4-9de718283653 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: e6bc8cb9-2f1a-49cb-974d-ea9a211126ee] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 749.267653] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-068e6e5c-20c1-4272-b184-f3ee9e48133b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.275611] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6a3c7b6-fdbf-4dc5-8bf4-9de718283653 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: e6bc8cb9-2f1a-49cb-974d-ea9a211126ee] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 749.276140] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-66a25e79-d551-4907-907b-d31a544b201b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.281594] env[63345]: DEBUG oslo_vmware.api [None req-e6a3c7b6-fdbf-4dc5-8bf4-9de718283653 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Waiting for the task: (returnval){ [ 749.281594] env[63345]: value = "task-1016962" [ 749.281594] env[63345]: _type = "Task" [ 749.281594] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.289425] env[63345]: DEBUG oslo_vmware.api [None req-e6a3c7b6-fdbf-4dc5-8bf4-9de718283653 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Task: {'id': task-1016962, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.306696] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 749.328674] env[63345]: DEBUG oslo_concurrency.lockutils [None req-aa91f299-8150-490c-b32f-3ea639ffcfb2 tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Acquiring lock "d3e99100-f13f-4019-9b5a-adaa65dacc5f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 749.329849] env[63345]: DEBUG oslo_concurrency.lockutils [None req-aa91f299-8150-490c-b32f-3ea639ffcfb2 tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Lock "d3e99100-f13f-4019-9b5a-adaa65dacc5f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 749.329849] env[63345]: DEBUG oslo_concurrency.lockutils [None req-aa91f299-8150-490c-b32f-3ea639ffcfb2 tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Acquiring lock "d3e99100-f13f-4019-9b5a-adaa65dacc5f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 749.329849] env[63345]: DEBUG oslo_concurrency.lockutils [None req-aa91f299-8150-490c-b32f-3ea639ffcfb2 tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Lock "d3e99100-f13f-4019-9b5a-adaa65dacc5f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 749.329849] env[63345]: DEBUG oslo_concurrency.lockutils [None req-aa91f299-8150-490c-b32f-3ea639ffcfb2 tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Lock "d3e99100-f13f-4019-9b5a-adaa65dacc5f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 749.332236] env[63345]: INFO nova.compute.manager [None req-aa91f299-8150-490c-b32f-3ea639ffcfb2 tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] [instance: d3e99100-f13f-4019-9b5a-adaa65dacc5f] Terminating instance [ 749.432488] env[63345]: DEBUG nova.compute.manager [req-23646140-c5c7-4894-a376-0ec44934a1de req-ea178d5e-1cfa-4d67-938e-7887b9ab88ad service nova] [instance: 778faa4f-4c5f-4ec2-b17b-5d7513c9c218] Received event network-changed-cdecc5d9-4e4d-421b-80ac-a8ae91e31e7e {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 749.432676] env[63345]: DEBUG nova.compute.manager [req-23646140-c5c7-4894-a376-0ec44934a1de 
req-ea178d5e-1cfa-4d67-938e-7887b9ab88ad service nova] [instance: 778faa4f-4c5f-4ec2-b17b-5d7513c9c218] Refreshing instance network info cache due to event network-changed-cdecc5d9-4e4d-421b-80ac-a8ae91e31e7e. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 749.432922] env[63345]: DEBUG oslo_concurrency.lockutils [req-23646140-c5c7-4894-a376-0ec44934a1de req-ea178d5e-1cfa-4d67-938e-7887b9ab88ad service nova] Acquiring lock "refresh_cache-778faa4f-4c5f-4ec2-b17b-5d7513c9c218" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 749.433082] env[63345]: DEBUG oslo_concurrency.lockutils [req-23646140-c5c7-4894-a376-0ec44934a1de req-ea178d5e-1cfa-4d67-938e-7887b9ab88ad service nova] Acquired lock "refresh_cache-778faa4f-4c5f-4ec2-b17b-5d7513c9c218" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 749.433267] env[63345]: DEBUG nova.network.neutron [req-23646140-c5c7-4894-a376-0ec44934a1de req-ea178d5e-1cfa-4d67-938e-7887b9ab88ad service nova] [instance: 778faa4f-4c5f-4ec2-b17b-5d7513c9c218] Refreshing network info cache for port cdecc5d9-4e4d-421b-80ac-a8ae91e31e7e {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 749.579381] env[63345]: DEBUG nova.compute.manager [None req-58b0f415-d91e-4f00-be28-2ecc3e050fdb tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 6cbe136b-5bf6-4f17-bcef-b712d850615f] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 749.583481] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-800a41b6-91a0-4ec7-bad5-e9802b6aa05c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.586042] env[63345]: DEBUG oslo_vmware.api [None req-2cac5929-5701-4cd3-b462-ff4ee05a1b55 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Task: {'id': task-1016958, 'name': PowerOnVM_Task, 'duration_secs': 0.499931} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.586279] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cac5929-5701-4cd3-b462-ff4ee05a1b55 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 749.677755] env[63345]: DEBUG oslo_concurrency.lockutils [None req-20fee683-8a2f-46a5-b495-3d50cf4c60ce tempest-ImagesNegativeTestJSON-660770258 tempest-ImagesNegativeTestJSON-660770258-project-member] Lock "abc81fa5-78a9-48b1-a49e-2faffddf2411" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.582s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 749.681492] env[63345]: DEBUG nova.compute.manager [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] [instance: cb712d80-be78-4c19-a891-329011521f30] Starting instance... 
{{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 749.736387] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016961, 'name': CreateVM_Task} progress is 99%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.793078] env[63345]: DEBUG oslo_vmware.api [None req-e6a3c7b6-fdbf-4dc5-8bf4-9de718283653 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Task: {'id': task-1016962, 'name': PowerOffVM_Task, 'duration_secs': 0.279172} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.793231] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6a3c7b6-fdbf-4dc5-8bf4-9de718283653 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: e6bc8cb9-2f1a-49cb-974d-ea9a211126ee] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 749.793420] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-e6a3c7b6-fdbf-4dc5-8bf4-9de718283653 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: e6bc8cb9-2f1a-49cb-974d-ea9a211126ee] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 749.793637] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9d6386cb-2063-412a-b47b-dce297435a38 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.836891] env[63345]: DEBUG nova.compute.manager [None req-aa91f299-8150-490c-b32f-3ea639ffcfb2 tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] [instance: d3e99100-f13f-4019-9b5a-adaa65dacc5f] Start destroying the instance on the hypervisor. 
{{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 749.836891] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-aa91f299-8150-490c-b32f-3ea639ffcfb2 tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] [instance: d3e99100-f13f-4019-9b5a-adaa65dacc5f] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 749.837584] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85a81c6f-bbbf-458d-9227-d4c2079a5f80 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.845421] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa91f299-8150-490c-b32f-3ea639ffcfb2 tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] [instance: d3e99100-f13f-4019-9b5a-adaa65dacc5f] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 749.845699] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-edad237e-7295-4217-ba91-c5cf35d71b65 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.852150] env[63345]: DEBUG oslo_vmware.api [None req-aa91f299-8150-490c-b32f-3ea639ffcfb2 tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Waiting for the task: (returnval){ [ 749.852150] env[63345]: value = "task-1016964" [ 749.852150] env[63345]: _type = "Task" [ 749.852150] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.861073] env[63345]: DEBUG oslo_vmware.api [None req-aa91f299-8150-490c-b32f-3ea639ffcfb2 tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Task: {'id': task-1016964, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.869855] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-e6a3c7b6-fdbf-4dc5-8bf4-9de718283653 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: e6bc8cb9-2f1a-49cb-974d-ea9a211126ee] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 749.870167] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-e6a3c7b6-fdbf-4dc5-8bf4-9de718283653 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: e6bc8cb9-2f1a-49cb-974d-ea9a211126ee] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 749.870505] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6a3c7b6-fdbf-4dc5-8bf4-9de718283653 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Deleting the datastore file [datastore2] e6bc8cb9-2f1a-49cb-974d-ea9a211126ee {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 749.870627] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c3e8b41d-1d2e-4f56-8712-d629dee486ff {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.876853] env[63345]: DEBUG oslo_vmware.api [None req-e6a3c7b6-fdbf-4dc5-8bf4-9de718283653 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Waiting for the task: (returnval){ [ 749.876853] env[63345]: value = "task-1016965" [ 749.876853] env[63345]: _type = "Task" [ 749.876853] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.885307] env[63345]: DEBUG oslo_vmware.api [None req-e6a3c7b6-fdbf-4dc5-8bf4-9de718283653 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Task: {'id': task-1016965, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.097918] env[63345]: INFO nova.compute.manager [None req-58b0f415-d91e-4f00-be28-2ecc3e050fdb tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 6cbe136b-5bf6-4f17-bcef-b712d850615f] instance snapshotting [ 750.100708] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d18058ad-ae3d-4479-9cef-68eb6015b4f3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.123681] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65828f30-ec86-4e12-8e0c-7d4029d3a83a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.201755] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 750.239186] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016961, 'name': CreateVM_Task, 'duration_secs': 0.888375} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.239880] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 778faa4f-4c5f-4ec2-b17b-5d7513c9c218] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 750.240030] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 750.242396] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 750.242396] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 750.242396] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9cc5261c-d805-4b1b-afe2-aae9162537cf {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.246928] env[63345]: DEBUG oslo_vmware.api [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Waiting for the task: (returnval){ [ 750.246928] env[63345]: value = 
"session[52090a46-d3fa-1435-f12f-c4737ae78030]52dfeb9d-14bb-f653-a9c9-4f61f7b9769b" [ 750.246928] env[63345]: _type = "Task" [ 750.246928] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.258721] env[63345]: DEBUG oslo_vmware.api [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52dfeb9d-14bb-f653-a9c9-4f61f7b9769b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.274083] env[63345]: DEBUG nova.network.neutron [req-23646140-c5c7-4894-a376-0ec44934a1de req-ea178d5e-1cfa-4d67-938e-7887b9ab88ad service nova] [instance: 778faa4f-4c5f-4ec2-b17b-5d7513c9c218] Updated VIF entry in instance network info cache for port cdecc5d9-4e4d-421b-80ac-a8ae91e31e7e. {{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 750.274610] env[63345]: DEBUG nova.network.neutron [req-23646140-c5c7-4894-a376-0ec44934a1de req-ea178d5e-1cfa-4d67-938e-7887b9ab88ad service nova] [instance: 778faa4f-4c5f-4ec2-b17b-5d7513c9c218] Updating instance_info_cache with network_info: [{"id": "cdecc5d9-4e4d-421b-80ac-a8ae91e31e7e", "address": "fa:16:3e:e5:8a:ad", "network": {"id": "80bb8388-e130-46af-a4fc-1daea51d1bf5", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1343573007-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "964cee117b3c4601b3afe82a8bb9c23e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ddfb706a-add1-4e16-9ac4-d20b16a1df6d", "external-id": "nsx-vlan-transportzone-820", "segmentation_id": 820, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcdecc5d9-4e", "ovs_interfaceid": "cdecc5d9-4e4d-421b-80ac-a8ae91e31e7e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 750.370575] env[63345]: DEBUG oslo_vmware.api [None req-aa91f299-8150-490c-b32f-3ea639ffcfb2 tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Task: {'id': task-1016964, 'name': PowerOffVM_Task, 'duration_secs': 0.234318} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.370867] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa91f299-8150-490c-b32f-3ea639ffcfb2 tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] [instance: d3e99100-f13f-4019-9b5a-adaa65dacc5f] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 750.371478] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-aa91f299-8150-490c-b32f-3ea639ffcfb2 tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] [instance: d3e99100-f13f-4019-9b5a-adaa65dacc5f] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 750.371478] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-53a10fd0-78a8-4fbe-b428-4d11464ecc38 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.386365] env[63345]: DEBUG oslo_vmware.api [None req-e6a3c7b6-fdbf-4dc5-8bf4-9de718283653 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Task: {'id': task-1016965, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.208722} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.389359] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6a3c7b6-fdbf-4dc5-8bf4-9de718283653 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 750.389600] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-e6a3c7b6-fdbf-4dc5-8bf4-9de718283653 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: e6bc8cb9-2f1a-49cb-974d-ea9a211126ee] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 750.390592] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-e6a3c7b6-fdbf-4dc5-8bf4-9de718283653 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: e6bc8cb9-2f1a-49cb-974d-ea9a211126ee] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 750.390592] env[63345]: INFO nova.compute.manager [None req-e6a3c7b6-fdbf-4dc5-8bf4-9de718283653 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: e6bc8cb9-2f1a-49cb-974d-ea9a211126ee] Took 1.12 seconds to destroy the instance on the hypervisor. [ 750.390592] env[63345]: DEBUG oslo.service.loopingcall [None req-e6a3c7b6-fdbf-4dc5-8bf4-9de718283653 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 750.390910] env[63345]: DEBUG nova.compute.manager [-] [instance: e6bc8cb9-2f1a-49cb-974d-ea9a211126ee] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 750.391055] env[63345]: DEBUG nova.network.neutron [-] [instance: e6bc8cb9-2f1a-49cb-974d-ea9a211126ee] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 750.451192] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-aa91f299-8150-490c-b32f-3ea639ffcfb2 tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] [instance: d3e99100-f13f-4019-9b5a-adaa65dacc5f] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 750.451192] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-aa91f299-8150-490c-b32f-3ea639ffcfb2 tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] [instance: d3e99100-f13f-4019-9b5a-adaa65dacc5f] Deleting contents of the VM from datastore datastore1 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 750.451192] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa91f299-8150-490c-b32f-3ea639ffcfb2 tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Deleting the datastore file [datastore1] d3e99100-f13f-4019-9b5a-adaa65dacc5f {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 750.451192] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-46709d11-9d70-4509-aa29-8c2b5f282f1c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.457452] env[63345]: DEBUG oslo_vmware.api [None req-aa91f299-8150-490c-b32f-3ea639ffcfb2 tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Waiting for the task: (returnval){ [ 750.457452] env[63345]: value = "task-1016967" [ 750.457452] env[63345]: _type = "Task" [ 750.457452] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.471962] env[63345]: DEBUG oslo_vmware.api [None req-aa91f299-8150-490c-b32f-3ea639ffcfb2 tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Task: {'id': task-1016967, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.603179] env[63345]: INFO nova.compute.manager [None req-2cac5929-5701-4cd3-b462-ff4ee05a1b55 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Updating instance to original state: 'active' [ 750.635027] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b85a4c5f-a9aa-4b8f-b7f1-f618cae4cd48 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.641263] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-58b0f415-d91e-4f00-be28-2ecc3e050fdb tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 6cbe136b-5bf6-4f17-bcef-b712d850615f] Creating Snapshot of the VM instance {{(pid=63345) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 750.641676] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-02ca9280-7957-48c8-8b64-e8e675b5fafe {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.648927] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b243c53d-b7d0-4986-9813-e22db931662a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.654571] env[63345]: DEBUG oslo_vmware.api [None req-58b0f415-d91e-4f00-be28-2ecc3e050fdb tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Waiting for the task: (returnval){ [ 750.654571] env[63345]: value = "task-1016968" [ 750.654571] env[63345]: _type = "Task" [ 750.654571] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.688033] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bee8938-9c51-407b-b724-d7664291a627 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.693624] env[63345]: DEBUG oslo_vmware.api [None req-58b0f415-d91e-4f00-be28-2ecc3e050fdb tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1016968, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.698708] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24cc4266-4738-4764-8280-d6090ff9f09c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.717464] env[63345]: DEBUG nova.compute.provider_tree [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 750.763318] env[63345]: DEBUG oslo_vmware.api [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52dfeb9d-14bb-f653-a9c9-4f61f7b9769b, 'name': SearchDatastore_Task, 'duration_secs': 0.01331} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.763769] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 750.764145] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 778faa4f-4c5f-4ec2-b17b-5d7513c9c218] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 750.764510] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 750.764753] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 750.765048] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 750.766397] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3998703b-6af8-4548-ae34-4c71e9e9e954 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
750.776551] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 750.776738] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 750.777836] env[63345]: DEBUG oslo_concurrency.lockutils [req-23646140-c5c7-4894-a376-0ec44934a1de req-ea178d5e-1cfa-4d67-938e-7887b9ab88ad service nova] Releasing lock "refresh_cache-778faa4f-4c5f-4ec2-b17b-5d7513c9c218" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 750.778186] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c294573-5e27-45a5-bc5a-19c1ff07e700 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.784275] env[63345]: DEBUG oslo_vmware.api [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Waiting for the task: (returnval){ [ 750.784275] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52cb3301-7841-c89e-1762-7836f18dd0a9" [ 750.784275] env[63345]: _type = "Task" [ 750.784275] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.801677] env[63345]: DEBUG oslo_vmware.api [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52cb3301-7841-c89e-1762-7836f18dd0a9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.971714] env[63345]: DEBUG oslo_vmware.api [None req-aa91f299-8150-490c-b32f-3ea639ffcfb2 tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Task: {'id': task-1016967, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.377617} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.975019] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa91f299-8150-490c-b32f-3ea639ffcfb2 tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 750.975019] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-aa91f299-8150-490c-b32f-3ea639ffcfb2 tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] [instance: d3e99100-f13f-4019-9b5a-adaa65dacc5f] Deleted contents of the VM from datastore datastore1 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 750.975019] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-aa91f299-8150-490c-b32f-3ea639ffcfb2 tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] [instance: d3e99100-f13f-4019-9b5a-adaa65dacc5f] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 750.975019] env[63345]: INFO nova.compute.manager [None req-aa91f299-8150-490c-b32f-3ea639ffcfb2 tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] [instance: d3e99100-f13f-4019-9b5a-adaa65dacc5f] Took 1.14 seconds to destroy the instance on the hypervisor. [ 750.975019] env[63345]: DEBUG oslo.service.loopingcall [None req-aa91f299-8150-490c-b32f-3ea639ffcfb2 tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 750.975325] env[63345]: DEBUG nova.compute.manager [-] [instance: d3e99100-f13f-4019-9b5a-adaa65dacc5f] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 750.975325] env[63345]: DEBUG nova.network.neutron [-] [instance: d3e99100-f13f-4019-9b5a-adaa65dacc5f] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 751.168724] env[63345]: DEBUG oslo_vmware.api [None req-58b0f415-d91e-4f00-be28-2ecc3e050fdb tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1016968, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.221215] env[63345]: DEBUG nova.scheduler.client.report [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 751.250884] env[63345]: DEBUG nova.network.neutron [-] [instance: e6bc8cb9-2f1a-49cb-974d-ea9a211126ee] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 751.297397] env[63345]: DEBUG oslo_vmware.api [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52cb3301-7841-c89e-1762-7836f18dd0a9, 'name': SearchDatastore_Task, 'duration_secs': 0.013879} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.297397] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3fa0c526-9c5d-414c-9d71-c6a086ff5749 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.302615] env[63345]: DEBUG oslo_vmware.api [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Waiting for the task: (returnval){ [ 751.302615] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]522bcbe0-25e3-f59c-8c0e-7d1a053428d4" [ 751.302615] env[63345]: _type = "Task" [ 751.302615] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.310475] env[63345]: DEBUG oslo_vmware.api [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]522bcbe0-25e3-f59c-8c0e-7d1a053428d4, 'name': SearchDatastore_Task} progress is 0%. 
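The "Inventory has not changed for provider ..." line above is the scheduler report client short-circuiting: it compares the freshly computed inventory dict against its cached copy and only calls placement when something differs. A simplified sketch of that check, with numbers shaped like the dicts in the log (note how a later max_unit change from 186 to 187 would flip the result and trigger an update):

def inventory_changed(cached, generated):
    # Resource classes map to dicts of total/reserved/min_unit/max_unit/
    # step_size/allocation_ratio, as printed by the report client above.
    return cached != generated

cached = {"DISK_GB": {"total": 400, "reserved": 0, "min_unit": 1,
                      "max_unit": 186, "step_size": 1, "allocation_ratio": 1.0}}
fresh = dict(cached)
print(inventory_changed(cached, fresh))   # False -> "Inventory has not changed"
fresh = {"DISK_GB": {**cached["DISK_GB"], "max_unit": 187}}
print(inventory_changed(cached, fresh))   # True  -> an update is sent to placement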
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.465984] env[63345]: DEBUG nova.compute.manager [req-067cc765-7700-4c41-abfd-32a9dedd6de6 req-c41ebb0e-274b-4573-9995-372bd8781593 service nova] [instance: e6bc8cb9-2f1a-49cb-974d-ea9a211126ee] Received event network-vif-deleted-829240e3-b053-450a-90f2-13fc659f12ca {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 751.466098] env[63345]: DEBUG nova.compute.manager [req-067cc765-7700-4c41-abfd-32a9dedd6de6 req-c41ebb0e-274b-4573-9995-372bd8781593 service nova] [instance: d3e99100-f13f-4019-9b5a-adaa65dacc5f] Received event network-vif-deleted-87ece051-f9a8-483b-bc74-1e13e76bdd75 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 751.466896] env[63345]: INFO nova.compute.manager [req-067cc765-7700-4c41-abfd-32a9dedd6de6 req-c41ebb0e-274b-4573-9995-372bd8781593 service nova] [instance: d3e99100-f13f-4019-9b5a-adaa65dacc5f] Neutron deleted interface 87ece051-f9a8-483b-bc74-1e13e76bdd75; detaching it from the instance and deleting it from the info cache [ 751.466896] env[63345]: DEBUG nova.network.neutron [req-067cc765-7700-4c41-abfd-32a9dedd6de6 req-c41ebb0e-274b-4573-9995-372bd8781593 service nova] [instance: d3e99100-f13f-4019-9b5a-adaa65dacc5f] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 751.666422] env[63345]: DEBUG oslo_vmware.api [None req-58b0f415-d91e-4f00-be28-2ecc3e050fdb tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1016968, 'name': CreateSnapshot_Task, 'duration_secs': 0.984152} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.666720] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-58b0f415-d91e-4f00-be28-2ecc3e050fdb tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 6cbe136b-5bf6-4f17-bcef-b712d850615f] Created Snapshot of the VM instance {{(pid=63345) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 751.667503] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-366be317-888c-48fe-b42e-cec7f8b1bb0f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.726655] env[63345]: DEBUG oslo_concurrency.lockutils [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.593s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 751.727445] env[63345]: DEBUG nova.compute.manager [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] [instance: 93112cc1-f9a1-4188-9555-bddf483426a1] Start building networks asynchronously for instance. 
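The network-vif-deleted events above show the externally driven cleanup path: when Neutron reports a deleted port, the compute manager drops that VIF from the instance's cached network info (and logs "Detach interface failed ... could not be found" when the instance itself is already gone). A small sketch of the cache-pruning step only, assuming network_info is a list of dicts keyed by 'id' as in the log:

def drop_deleted_vif(network_info, port_id):
    # Remove the deleted port from the cached network info; an empty result
    # matches the "Updating instance_info_cache with network_info: []" lines.
    return [vif for vif in network_info if vif.get("id") != port_id]

cache = [{"id": "87ece051-f9a8-483b-bc74-1e13e76bdd75", "address": "fa:16:3e:00:00:01"}]
print(drop_deleted_vif(cache, "87ece051-f9a8-483b-bc74-1e13e76bdd75"))  # []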
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 751.731076] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.265s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 751.733031] env[63345]: INFO nova.compute.claims [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: e3d52cbd-e768-4425-b83e-180a6e58fd00] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 751.753907] env[63345]: INFO nova.compute.manager [-] [instance: e6bc8cb9-2f1a-49cb-974d-ea9a211126ee] Took 1.36 seconds to deallocate network for instance. [ 751.765998] env[63345]: DEBUG nova.network.neutron [-] [instance: d3e99100-f13f-4019-9b5a-adaa65dacc5f] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 751.817747] env[63345]: DEBUG oslo_vmware.api [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]522bcbe0-25e3-f59c-8c0e-7d1a053428d4, 'name': SearchDatastore_Task, 'duration_secs': 0.010784} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.818123] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 751.818472] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore1] 778faa4f-4c5f-4ec2-b17b-5d7513c9c218/778faa4f-4c5f-4ec2-b17b-5d7513c9c218.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 751.818799] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-78e02a2e-f400-4f3a-9dbc-1ca742659beb {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.831156] env[63345]: DEBUG oslo_vmware.api [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Waiting for the task: (returnval){ [ 751.831156] env[63345]: value = "task-1016969" [ 751.831156] env[63345]: _type = "Task" [ 751.831156] env[63345]: } to complete. 
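The compute_resources lock lines above ("acquired ... waited 24.265s", "released ... held 2.593s") come from oslo.concurrency's lockutils instrumentation. A simplified, in-process sketch of how such wait/held timings can be produced around a named lock; the real helper additionally supports fair and inter-process (external) locks:

import contextlib
import threading
import time

_locks = {}

@contextlib.contextmanager
def timed_lock(name):
    # Acquire a named lock and report how long we waited for it and how
    # long we held it, mirroring the lockutils debug lines above.
    lock = _locks.setdefault(name, threading.Lock())
    t0 = time.monotonic()
    lock.acquire()
    print('Lock "%s" acquired :: waited %.3fs' % (name, time.monotonic() - t0))
    t1 = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        print('Lock "%s" released :: held %.3fs' % (name, time.monotonic() - t1))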
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.838286] env[63345]: DEBUG oslo_vmware.api [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1016969, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.969669] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3ea849f1-839f-44b4-9bb1-698e622c6121 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.979363] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dea92ae7-d671-44c8-ba58-d92f9a66289e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.012476] env[63345]: DEBUG nova.compute.manager [req-067cc765-7700-4c41-abfd-32a9dedd6de6 req-c41ebb0e-274b-4573-9995-372bd8781593 service nova] [instance: d3e99100-f13f-4019-9b5a-adaa65dacc5f] Detach interface failed, port_id=87ece051-f9a8-483b-bc74-1e13e76bdd75, reason: Instance d3e99100-f13f-4019-9b5a-adaa65dacc5f could not be found. {{(pid=63345) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 752.189298] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-58b0f415-d91e-4f00-be28-2ecc3e050fdb tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 6cbe136b-5bf6-4f17-bcef-b712d850615f] Creating linked-clone VM from snapshot {{(pid=63345) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 752.189683] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-8ab17bf4-21d2-4c6c-ad79-1edb47cf2f03 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.198646] env[63345]: DEBUG oslo_vmware.api [None req-58b0f415-d91e-4f00-be28-2ecc3e050fdb tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Waiting for the task: (returnval){ [ 752.198646] env[63345]: value = "task-1016970" [ 752.198646] env[63345]: _type = "Task" [ 752.198646] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.207774] env[63345]: DEBUG oslo_vmware.api [None req-58b0f415-d91e-4f00-be28-2ecc3e050fdb tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1016970, 'name': CloneVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.239251] env[63345]: DEBUG nova.compute.utils [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 752.244391] env[63345]: DEBUG nova.compute.manager [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] [instance: 93112cc1-f9a1-4188-9555-bddf483426a1] Allocating IP information in the background. 
{{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 752.244391] env[63345]: DEBUG nova.network.neutron [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] [instance: 93112cc1-f9a1-4188-9555-bddf483426a1] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 752.261568] env[63345]: DEBUG oslo_concurrency.lockutils [None req-e6a3c7b6-fdbf-4dc5-8bf4-9de718283653 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 752.269616] env[63345]: INFO nova.compute.manager [-] [instance: d3e99100-f13f-4019-9b5a-adaa65dacc5f] Took 1.30 seconds to deallocate network for instance. [ 752.309266] env[63345]: DEBUG nova.policy [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd19438fc9e0f4b208dcbabb9faf1f690', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4919485c7e184230b38e703f7ce8a047', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 752.342342] env[63345]: DEBUG oslo_vmware.api [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1016969, 'name': CopyVirtualDisk_Task} progress is 4%. 
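The "Policy check for network:attach_external_network failed" line above is a policy evaluation rejecting a request whose credentials only carry the member and reader roles. A toy stand-in for that evaluation; the rule table here is an assumption for illustration, whereas the real rules come from oslo.policy defaults and policy files:

def authorize(rule, credentials):
    # Minimal role-based check: the rule passes only if the caller holds
    # at least one of the roles the rule requires.
    required_roles = {"network:attach_external_network": {"admin"}}
    return bool(required_roles.get(rule, set()) & set(credentials.get("roles", ())))

creds = {"roles": ["member", "reader"], "project_id": "4919485c7e184230b38e703f7ce8a047"}
print(authorize("network:attach_external_network", creds))   # False -> check failed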
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.371644] env[63345]: DEBUG oslo_concurrency.lockutils [None req-76a23b8d-6e05-4458-badc-d174e0598912 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Acquiring lock "27c6dc17-4ded-4fe7-8fba-265eae64fc32" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 752.372100] env[63345]: DEBUG oslo_concurrency.lockutils [None req-76a23b8d-6e05-4458-badc-d174e0598912 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Lock "27c6dc17-4ded-4fe7-8fba-265eae64fc32" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 752.372361] env[63345]: DEBUG oslo_concurrency.lockutils [None req-76a23b8d-6e05-4458-badc-d174e0598912 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Acquiring lock "27c6dc17-4ded-4fe7-8fba-265eae64fc32-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 752.373039] env[63345]: DEBUG oslo_concurrency.lockutils [None req-76a23b8d-6e05-4458-badc-d174e0598912 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Lock "27c6dc17-4ded-4fe7-8fba-265eae64fc32-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 752.373039] env[63345]: DEBUG oslo_concurrency.lockutils [None req-76a23b8d-6e05-4458-badc-d174e0598912 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Lock "27c6dc17-4ded-4fe7-8fba-265eae64fc32-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 752.377764] env[63345]: INFO nova.compute.manager [None req-76a23b8d-6e05-4458-badc-d174e0598912 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Terminating instance [ 752.641755] env[63345]: DEBUG nova.network.neutron [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] [instance: 93112cc1-f9a1-4188-9555-bddf483426a1] Successfully created port: b8d03603-203a-4bdb-ac34-d490f6f611b2 {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 752.711461] env[63345]: DEBUG oslo_vmware.api [None req-58b0f415-d91e-4f00-be28-2ecc3e050fdb tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1016970, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.744561] env[63345]: DEBUG nova.compute.manager [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] [instance: 93112cc1-f9a1-4188-9555-bddf483426a1] Start building block device mappings for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 752.777851] env[63345]: DEBUG oslo_concurrency.lockutils [None req-aa91f299-8150-490c-b32f-3ea639ffcfb2 tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 752.842467] env[63345]: DEBUG oslo_vmware.api [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1016969, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.861368} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.842467] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore1] 778faa4f-4c5f-4ec2-b17b-5d7513c9c218/778faa4f-4c5f-4ec2-b17b-5d7513c9c218.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 752.842467] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 778faa4f-4c5f-4ec2-b17b-5d7513c9c218] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 752.842467] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f42581e1-51f1-419b-8ad2-0dc0f7afc10a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.849225] env[63345]: DEBUG oslo_vmware.api [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Waiting for the task: (returnval){ [ 752.849225] env[63345]: value = "task-1016971" [ 752.849225] env[63345]: _type = "Task" [ 752.849225] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.860261] env[63345]: DEBUG oslo_vmware.api [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1016971, 'name': ExtendVirtualDisk_Task} progress is 0%. 
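The CopyVirtualDisk_Task / "Extending root virtual disk" sequence above is the image-cache spawn path: the cached base VMDK is copied into the instance's datastore directory and the copy is then grown to the flavor's root size. The sketch below mimics only the ordering of those two steps with local file operations; the real driver issues CopyVirtualDisk_Task and ExtendVirtualDisk_Task against the datastore, not the local filesystem:

import shutil
from pathlib import Path

def clone_and_extend_root_disk(cached_vmdk, instance_dir, root_size_bytes):
    # Step 1: copy the cached base image next to the instance
    # (CopyVirtualDisk_Task analogue).
    instance_dir = Path(instance_dir)
    instance_dir.mkdir(parents=True, exist_ok=True)
    dst = instance_dir / Path(cached_vmdk).name
    shutil.copyfile(cached_vmdk, dst)
    # Step 2: grow the copy to the flavor's root size, never shrinking it
    # (ExtendVirtualDisk_Task analogue).
    if dst.stat().st_size < root_size_bytes:
        with open(dst, "r+b") as f:
            f.truncate(root_size_bytes)
    return dst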
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.884849] env[63345]: DEBUG nova.compute.manager [None req-76a23b8d-6e05-4458-badc-d174e0598912 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Start destroying the instance on the hypervisor. {{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 752.885093] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-76a23b8d-6e05-4458-badc-d174e0598912 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 752.886496] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-533d91cb-bcc1-4133-91ed-d91d93ca1f60 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.965055] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-76a23b8d-6e05-4458-badc-d174e0598912 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 752.965055] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-514b8e77-dcad-4323-bdff-7d758365fc9b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.974905] env[63345]: DEBUG oslo_vmware.api [None req-76a23b8d-6e05-4458-badc-d174e0598912 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Waiting for the task: (returnval){ [ 752.974905] env[63345]: value = "task-1016972" [ 752.974905] env[63345]: _type = "Task" [ 752.974905] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.987021] env[63345]: DEBUG oslo_vmware.api [None req-76a23b8d-6e05-4458-badc-d174e0598912 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Task: {'id': task-1016972, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.009407] env[63345]: DEBUG nova.network.neutron [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] [instance: 93112cc1-f9a1-4188-9555-bddf483426a1] Successfully created port: e5fd2647-d313-4830-9b9d-0722b78abadb {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 753.217821] env[63345]: DEBUG oslo_vmware.api [None req-58b0f415-d91e-4f00-be28-2ecc3e050fdb tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1016970, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.361931] env[63345]: DEBUG oslo_vmware.api [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1016971, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.459974} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.362426] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 778faa4f-4c5f-4ec2-b17b-5d7513c9c218] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 753.363602] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bc8326e-da26-4d14-977e-72403ccd7b5e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.392642] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 778faa4f-4c5f-4ec2-b17b-5d7513c9c218] Reconfiguring VM instance instance-00000035 to attach disk [datastore1] 778faa4f-4c5f-4ec2-b17b-5d7513c9c218/778faa4f-4c5f-4ec2-b17b-5d7513c9c218.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 753.396051] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-991dd5ea-5951-4a85-a43c-ac2c827a4e8c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.419196] env[63345]: DEBUG oslo_vmware.api [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Waiting for the task: (returnval){ [ 753.419196] env[63345]: value = "task-1016973" [ 753.419196] env[63345]: _type = "Task" [ 753.419196] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.432384] env[63345]: DEBUG oslo_vmware.api [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1016973, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.487146] env[63345]: DEBUG oslo_vmware.api [None req-76a23b8d-6e05-4458-badc-d174e0598912 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Task: {'id': task-1016972, 'name': PowerOffVM_Task, 'duration_secs': 0.200048} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.487542] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-76a23b8d-6e05-4458-badc-d174e0598912 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 753.487756] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-76a23b8d-6e05-4458-badc-d174e0598912 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 753.488925] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ed27e4ea-fdd0-44bb-9633-21255d77fc8d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.491118] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1427f74b-7570-4879-a16b-3cff2fc0cb37 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.498157] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e82b00f-7818-4a16-8998-8c997d256a9b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.537024] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52729796-5434-4ce0-97f9-9b17dbdbdfce {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.545017] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf4465a5-8931-48d1-8ad8-c80a3ebc0dc4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.560151] env[63345]: DEBUG nova.compute.provider_tree [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 753.562754] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-76a23b8d-6e05-4458-badc-d174e0598912 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 753.565637] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-76a23b8d-6e05-4458-badc-d174e0598912 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 753.565637] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-76a23b8d-6e05-4458-badc-d174e0598912 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Deleting the datastore file [datastore2] 27c6dc17-4ded-4fe7-8fba-265eae64fc32 {{(pid=63345) 
file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 753.565637] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0ddb7fbe-bebf-48e1-8150-ea61426f48f1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.569345] env[63345]: DEBUG oslo_vmware.api [None req-76a23b8d-6e05-4458-badc-d174e0598912 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Waiting for the task: (returnval){ [ 753.569345] env[63345]: value = "task-1016975" [ 753.569345] env[63345]: _type = "Task" [ 753.569345] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.576999] env[63345]: DEBUG oslo_vmware.api [None req-76a23b8d-6e05-4458-badc-d174e0598912 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Task: {'id': task-1016975, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.712323] env[63345]: DEBUG oslo_vmware.api [None req-58b0f415-d91e-4f00-be28-2ecc3e050fdb tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1016970, 'name': CloneVM_Task} progress is 95%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.762270] env[63345]: DEBUG nova.compute.manager [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] [instance: 93112cc1-f9a1-4188-9555-bddf483426a1] Start spawning the instance on the hypervisor. 
{{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 753.787846] env[63345]: DEBUG nova.virt.hardware [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 753.788541] env[63345]: DEBUG nova.virt.hardware [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 753.788747] env[63345]: DEBUG nova.virt.hardware [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 753.788949] env[63345]: DEBUG nova.virt.hardware [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 753.789229] env[63345]: DEBUG nova.virt.hardware [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 753.789410] env[63345]: DEBUG nova.virt.hardware [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 753.789627] env[63345]: DEBUG nova.virt.hardware [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 753.789789] env[63345]: DEBUG nova.virt.hardware [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 753.789957] env[63345]: DEBUG nova.virt.hardware [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f 
tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 753.790136] env[63345]: DEBUG nova.virt.hardware [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 753.790314] env[63345]: DEBUG nova.virt.hardware [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 753.791225] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-324b8f73-b963-4a72-a3c2-8435748c506c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.799267] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c78b2fd-230e-4073-85c6-b120bd3575e8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.929999] env[63345]: DEBUG oslo_vmware.api [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1016973, 'name': ReconfigVM_Task, 'duration_secs': 0.366859} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.930318] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 778faa4f-4c5f-4ec2-b17b-5d7513c9c218] Reconfigured VM instance instance-00000035 to attach disk [datastore1] 778faa4f-4c5f-4ec2-b17b-5d7513c9c218/778faa4f-4c5f-4ec2-b17b-5d7513c9c218.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 753.931026] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f5c0e208-fe65-409d-ab50-53c480017595 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.937550] env[63345]: DEBUG oslo_vmware.api [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Waiting for the task: (returnval){ [ 753.937550] env[63345]: value = "task-1016976" [ 753.937550] env[63345]: _type = "Task" [ 753.937550] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.946766] env[63345]: DEBUG oslo_vmware.api [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1016976, 'name': Rename_Task} progress is 0%. 
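The nova.virt.hardware lines above walk from the flavor and image limits (all unset, hence the 65536 maxima) to the single viable topology for one vCPU. A condensed sketch of that enumeration step; the real code additionally weighs preferred topologies and NUMA constraints:

from itertools import product

def possible_cpu_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    # Enumerate (sockets, cores, threads) triples whose product equals vcpus
    # and which stay within the given maxima.
    topologies = []
    for sockets, cores in product(range(1, vcpus + 1), repeat=2):
        if vcpus % (sockets * cores):
            continue
        threads = vcpus // (sockets * cores)
        if sockets <= max_sockets and cores <= max_cores and threads <= max_threads:
            topologies.append((sockets, cores, threads))
    return topologies

print(possible_cpu_topologies(1))   # [(1, 1, 1)] -- "Got 1 possible topologies"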
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.065391] env[63345]: DEBUG nova.scheduler.client.report [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 754.079445] env[63345]: DEBUG oslo_vmware.api [None req-76a23b8d-6e05-4458-badc-d174e0598912 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Task: {'id': task-1016975, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.19564} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.079709] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-76a23b8d-6e05-4458-badc-d174e0598912 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 754.079914] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-76a23b8d-6e05-4458-badc-d174e0598912 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 754.080106] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-76a23b8d-6e05-4458-badc-d174e0598912 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 754.081043] env[63345]: INFO nova.compute.manager [None req-76a23b8d-6e05-4458-badc-d174e0598912 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Took 1.20 seconds to destroy the instance on the hypervisor. [ 754.081043] env[63345]: DEBUG oslo.service.loopingcall [None req-76a23b8d-6e05-4458-badc-d174e0598912 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 754.081043] env[63345]: DEBUG nova.compute.manager [-] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 754.081043] env[63345]: DEBUG nova.network.neutron [-] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 754.214300] env[63345]: DEBUG oslo_vmware.api [None req-58b0f415-d91e-4f00-be28-2ecc3e050fdb tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1016970, 'name': CloneVM_Task, 'duration_secs': 1.782132} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.214581] env[63345]: INFO nova.virt.vmwareapi.vmops [None req-58b0f415-d91e-4f00-be28-2ecc3e050fdb tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 6cbe136b-5bf6-4f17-bcef-b712d850615f] Created linked-clone VM from snapshot [ 754.215349] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9593c6a-6705-4bf1-8ad3-d5b61dd79967 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.222709] env[63345]: DEBUG nova.virt.vmwareapi.images [None req-58b0f415-d91e-4f00-be28-2ecc3e050fdb tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 6cbe136b-5bf6-4f17-bcef-b712d850615f] Uploading image f0cb364f-cc7f-4213-88ce-b8773612e90e {{(pid=63345) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 754.245290] env[63345]: DEBUG oslo_vmware.rw_handles [None req-58b0f415-d91e-4f00-be28-2ecc3e050fdb tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 754.245290] env[63345]: value = "vm-226016" [ 754.245290] env[63345]: _type = "VirtualMachine" [ 754.245290] env[63345]: }. {{(pid=63345) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 754.245590] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-fc2ae007-0678-4c8d-afe3-03ff5915611b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.253153] env[63345]: DEBUG oslo_vmware.rw_handles [None req-58b0f415-d91e-4f00-be28-2ecc3e050fdb tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Lease: (returnval){ [ 754.253153] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]520f8594-cec0-a0b8-4240-068e6c110533" [ 754.253153] env[63345]: _type = "HttpNfcLease" [ 754.253153] env[63345]: } obtained for exporting VM: (result){ [ 754.253153] env[63345]: value = "vm-226016" [ 754.253153] env[63345]: _type = "VirtualMachine" [ 754.253153] env[63345]: }. 
{{(pid=63345) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 754.253499] env[63345]: DEBUG oslo_vmware.api [None req-58b0f415-d91e-4f00-be28-2ecc3e050fdb tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Waiting for the lease: (returnval){ [ 754.253499] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]520f8594-cec0-a0b8-4240-068e6c110533" [ 754.253499] env[63345]: _type = "HttpNfcLease" [ 754.253499] env[63345]: } to be ready. {{(pid=63345) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 754.259773] env[63345]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 754.259773] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]520f8594-cec0-a0b8-4240-068e6c110533" [ 754.259773] env[63345]: _type = "HttpNfcLease" [ 754.259773] env[63345]: } is initializing. {{(pid=63345) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 754.455545] env[63345]: DEBUG oslo_vmware.api [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1016976, 'name': Rename_Task, 'duration_secs': 0.193985} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.456384] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 778faa4f-4c5f-4ec2-b17b-5d7513c9c218] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 754.456859] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ecc48206-e554-46b7-9524-4224d528af7f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.469401] env[63345]: DEBUG oslo_vmware.api [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Waiting for the task: (returnval){ [ 754.469401] env[63345]: value = "task-1016978" [ 754.469401] env[63345]: _type = "Task" [ 754.469401] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.480682] env[63345]: DEBUG oslo_vmware.api [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1016978, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.575574] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.844s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 754.576539] env[63345]: DEBUG nova.compute.manager [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: e3d52cbd-e768-4425-b83e-180a6e58fd00] Start building networks asynchronously for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 754.580966] env[63345]: DEBUG oslo_concurrency.lockutils [None req-46d06c0e-8898-48ae-8f40-b29c17280d4b tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.739s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 754.581148] env[63345]: DEBUG nova.objects.instance [None req-46d06c0e-8898-48ae-8f40-b29c17280d4b tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Lazy-loading 'resources' on Instance uuid 78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 754.633436] env[63345]: DEBUG nova.compute.manager [req-37a15d48-cb82-45c2-9886-630d505d2992 req-ccb0e8bf-8043-4bab-9810-86f90aec3ea4 service nova] [instance: 93112cc1-f9a1-4188-9555-bddf483426a1] Received event network-vif-plugged-b8d03603-203a-4bdb-ac34-d490f6f611b2 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 754.633600] env[63345]: DEBUG oslo_concurrency.lockutils [req-37a15d48-cb82-45c2-9886-630d505d2992 req-ccb0e8bf-8043-4bab-9810-86f90aec3ea4 service nova] Acquiring lock "93112cc1-f9a1-4188-9555-bddf483426a1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 754.633841] env[63345]: DEBUG oslo_concurrency.lockutils [req-37a15d48-cb82-45c2-9886-630d505d2992 req-ccb0e8bf-8043-4bab-9810-86f90aec3ea4 service nova] Lock "93112cc1-f9a1-4188-9555-bddf483426a1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 754.634405] env[63345]: DEBUG oslo_concurrency.lockutils [req-37a15d48-cb82-45c2-9886-630d505d2992 req-ccb0e8bf-8043-4bab-9810-86f90aec3ea4 service nova] Lock "93112cc1-f9a1-4188-9555-bddf483426a1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 754.634561] env[63345]: DEBUG nova.compute.manager [req-37a15d48-cb82-45c2-9886-630d505d2992 req-ccb0e8bf-8043-4bab-9810-86f90aec3ea4 service nova] [instance: 93112cc1-f9a1-4188-9555-bddf483426a1] No waiting events found dispatching network-vif-plugged-b8d03603-203a-4bdb-ac34-d490f6f611b2 {{(pid=63345) 
pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 754.634803] env[63345]: WARNING nova.compute.manager [req-37a15d48-cb82-45c2-9886-630d505d2992 req-ccb0e8bf-8043-4bab-9810-86f90aec3ea4 service nova] [instance: 93112cc1-f9a1-4188-9555-bddf483426a1] Received unexpected event network-vif-plugged-b8d03603-203a-4bdb-ac34-d490f6f611b2 for instance with vm_state building and task_state spawning. [ 754.760447] env[63345]: DEBUG nova.compute.manager [req-c1df66f2-626e-43ad-9532-de898056daee req-7d02e789-3517-4fa9-825b-146a76aa82d3 service nova] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Received event network-vif-deleted-8c1bd582-6867-4cba-9522-0e03560fa3f7 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 754.760678] env[63345]: INFO nova.compute.manager [req-c1df66f2-626e-43ad-9532-de898056daee req-7d02e789-3517-4fa9-825b-146a76aa82d3 service nova] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Neutron deleted interface 8c1bd582-6867-4cba-9522-0e03560fa3f7; detaching it from the instance and deleting it from the info cache [ 754.760745] env[63345]: DEBUG nova.network.neutron [req-c1df66f2-626e-43ad-9532-de898056daee req-7d02e789-3517-4fa9-825b-146a76aa82d3 service nova] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 754.766580] env[63345]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 754.766580] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]520f8594-cec0-a0b8-4240-068e6c110533" [ 754.766580] env[63345]: _type = "HttpNfcLease" [ 754.766580] env[63345]: } is ready. {{(pid=63345) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 754.766919] env[63345]: DEBUG oslo_vmware.rw_handles [None req-58b0f415-d91e-4f00-be28-2ecc3e050fdb tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 754.766919] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]520f8594-cec0-a0b8-4240-068e6c110533" [ 754.766919] env[63345]: _type = "HttpNfcLease" [ 754.766919] env[63345]: }. {{(pid=63345) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 754.767691] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-715cb99b-1310-4fe6-9b9a-393da22caee0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.778685] env[63345]: DEBUG oslo_vmware.rw_handles [None req-58b0f415-d91e-4f00-be28-2ecc3e050fdb tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5250a575-72fb-19e5-6687-e54f1cb00fa1/disk-0.vmdk from lease info. {{(pid=63345) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 754.778892] env[63345]: DEBUG oslo_vmware.rw_handles [None req-58b0f415-d91e-4f00-be28-2ecc3e050fdb tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5250a575-72fb-19e5-6687-e54f1cb00fa1/disk-0.vmdk for reading. 
{{(pid=63345) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 754.849201] env[63345]: DEBUG nova.network.neutron [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] [instance: 93112cc1-f9a1-4188-9555-bddf483426a1] Successfully updated port: b8d03603-203a-4bdb-ac34-d490f6f611b2 {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 754.958655] env[63345]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-bd0c484d-743f-4729-a4ae-c6ba5a4d09be {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.975622] env[63345]: DEBUG oslo_vmware.api [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1016978, 'name': PowerOnVM_Task, 'duration_secs': 0.475514} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.977465] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 778faa4f-4c5f-4ec2-b17b-5d7513c9c218] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 754.977700] env[63345]: INFO nova.compute.manager [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 778faa4f-4c5f-4ec2-b17b-5d7513c9c218] Took 8.04 seconds to spawn the instance on the hypervisor. [ 754.977891] env[63345]: DEBUG nova.compute.manager [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 778faa4f-4c5f-4ec2-b17b-5d7513c9c218] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 754.978821] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6772e98e-f9b0-4847-82d6-6511fd5a0379 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.087642] env[63345]: DEBUG nova.compute.utils [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 755.091862] env[63345]: DEBUG nova.compute.manager [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: e3d52cbd-e768-4425-b83e-180a6e58fd00] Allocating IP information in the background. 
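The snapshot upload above goes through an HttpNfcLease: wait until the lease reports ready, pull the disk URL out of the lease info, then stream the VMDK while the lease is kept alive with periodic progress updates. A compressed sketch of that flow, assuming a lease object whose state attribute refreshes on access and whose info.disk_urls lists the exported disks (neither is the real oslo.vmware rw_handles interface), with urllib standing in for the HTTP read:

import time
import urllib.request

def export_vmdk(lease, dest_path, chunk_size=1024 * 1024, poll_interval=1.0):
    # Wait for the lease to become ready, mirroring the "is initializing" /
    # "is ready" poll lines above.
    while lease.state == "initializing":
        time.sleep(poll_interval)
    if lease.state != "ready":
        raise RuntimeError("lease entered state %s" % lease.state)
    # Stream the first disk URL advertised by the lease into a local file;
    # the real code also reports HttpNfcLeaseProgress while copying.
    url = lease.info.disk_urls[0]
    with urllib.request.urlopen(url) as src, open(dest_path, "wb") as dst:
        while True:
            chunk = src.read(chunk_size)
            if not chunk:
                break
            dst.write(chunk)
    return dest_path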
{{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 755.092078] env[63345]: DEBUG nova.network.neutron [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: e3d52cbd-e768-4425-b83e-180a6e58fd00] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 755.148150] env[63345]: DEBUG nova.policy [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fce058d27d8e4da19af436b282b37f32', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '63d7b3facae6416989f763e610cf98f7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 755.148150] env[63345]: DEBUG nova.network.neutron [-] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 755.263473] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e9c111f3-b70f-4658-bf26-104a3f63848a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.273414] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b12adbdc-4e45-4c3c-849b-7c7e97c9c92d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.308221] env[63345]: DEBUG nova.compute.manager [req-c1df66f2-626e-43ad-9532-de898056daee req-7d02e789-3517-4fa9-825b-146a76aa82d3 service nova] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Detach interface failed, port_id=8c1bd582-6867-4cba-9522-0e03560fa3f7, reason: Instance 27c6dc17-4ded-4fe7-8fba-265eae64fc32 could not be found. {{(pid=63345) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 755.455946] env[63345]: DEBUG nova.network.neutron [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: e3d52cbd-e768-4425-b83e-180a6e58fd00] Successfully created port: b14c7ab5-3080-4f38-a677-17206714df35 {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 755.508151] env[63345]: INFO nova.compute.manager [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 778faa4f-4c5f-4ec2-b17b-5d7513c9c218] Took 35.64 seconds to build instance. [ 755.595847] env[63345]: DEBUG nova.compute.manager [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: e3d52cbd-e768-4425-b83e-180a6e58fd00] Start building block device mappings for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 755.652832] env[63345]: INFO nova.compute.manager [-] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Took 1.57 seconds to deallocate network for instance. [ 755.706812] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce7db994-7cf2-40d9-89ea-1cfc887847d0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.716882] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a01f8a81-fbf1-4e22-a203-921ed84ba6e9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.756201] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-542b9d07-bb2d-4a61-8067-4974c50bc8d9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.766555] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6def2506-5015-462c-9af9-1572df71501c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.781469] env[63345]: DEBUG nova.compute.provider_tree [None req-46d06c0e-8898-48ae-8f40-b29c17280d4b tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 756.012971] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4adad57b-fa7a-400c-aee2-9d511ee15f72 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Lock "778faa4f-4c5f-4ec2-b17b-5d7513c9c218" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 118.495s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 756.167570] env[63345]: DEBUG oslo_concurrency.lockutils [None req-76a23b8d-6e05-4458-badc-d174e0598912 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 756.305391] env[63345]: ERROR nova.scheduler.client.report [None req-46d06c0e-8898-48ae-8f40-b29c17280d4b tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] [req-09ff837d-87c1-48aa-b02d-5c4b43c3fd23] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 
'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID fc35ddde-c15e-4ab8-bf77-a06ae0805b57. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-09ff837d-87c1-48aa-b02d-5c4b43c3fd23"}]} [ 756.322478] env[63345]: DEBUG nova.scheduler.client.report [None req-46d06c0e-8898-48ae-8f40-b29c17280d4b tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Refreshing inventories for resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 756.339259] env[63345]: DEBUG nova.scheduler.client.report [None req-46d06c0e-8898-48ae-8f40-b29c17280d4b tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Updating ProviderTree inventory for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 756.339513] env[63345]: DEBUG nova.compute.provider_tree [None req-46d06c0e-8898-48ae-8f40-b29c17280d4b tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 756.350828] env[63345]: DEBUG nova.scheduler.client.report [None req-46d06c0e-8898-48ae-8f40-b29c17280d4b tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Refreshing aggregate associations for resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57, aggregates: None {{(pid=63345) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 756.373471] env[63345]: DEBUG nova.scheduler.client.report [None req-46d06c0e-8898-48ae-8f40-b29c17280d4b tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Refreshing trait associations for resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=63345) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 756.518307] env[63345]: DEBUG nova.compute.manager [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: fe3e2b2a-9583-482e-b69b-6c130801d7db] Starting instance... 
{{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 756.616611] env[63345]: DEBUG nova.compute.manager [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: e3d52cbd-e768-4425-b83e-180a6e58fd00] Start spawning the instance on the hypervisor. {{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 756.648478] env[63345]: DEBUG nova.virt.hardware [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 756.648748] env[63345]: DEBUG nova.virt.hardware [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 756.648930] env[63345]: DEBUG nova.virt.hardware [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 756.649163] env[63345]: DEBUG nova.virt.hardware [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 756.649377] env[63345]: DEBUG nova.virt.hardware [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 756.649554] env[63345]: DEBUG nova.virt.hardware [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 756.649778] env[63345]: DEBUG nova.virt.hardware [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 756.649944] env[63345]: DEBUG nova.virt.hardware [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 
tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 756.650132] env[63345]: DEBUG nova.virt.hardware [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 756.650304] env[63345]: DEBUG nova.virt.hardware [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 756.650501] env[63345]: DEBUG nova.virt.hardware [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 756.651412] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2127af51-39e5-499d-8ff4-09892b919f48 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.661038] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b53e176-c152-42a3-b663-e3c1ae35b3c2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.757679] env[63345]: DEBUG nova.compute.manager [req-6244e69c-605e-4aeb-b001-9b40fee315e3 req-0390fc45-91ce-479e-b122-9a87883a36d1 service nova] [instance: 93112cc1-f9a1-4188-9555-bddf483426a1] Received event network-changed-b8d03603-203a-4bdb-ac34-d490f6f611b2 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 756.757903] env[63345]: DEBUG nova.compute.manager [req-6244e69c-605e-4aeb-b001-9b40fee315e3 req-0390fc45-91ce-479e-b122-9a87883a36d1 service nova] [instance: 93112cc1-f9a1-4188-9555-bddf483426a1] Refreshing instance network info cache due to event network-changed-b8d03603-203a-4bdb-ac34-d490f6f611b2. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 756.758142] env[63345]: DEBUG oslo_concurrency.lockutils [req-6244e69c-605e-4aeb-b001-9b40fee315e3 req-0390fc45-91ce-479e-b122-9a87883a36d1 service nova] Acquiring lock "refresh_cache-93112cc1-f9a1-4188-9555-bddf483426a1" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 756.758295] env[63345]: DEBUG oslo_concurrency.lockutils [req-6244e69c-605e-4aeb-b001-9b40fee315e3 req-0390fc45-91ce-479e-b122-9a87883a36d1 service nova] Acquired lock "refresh_cache-93112cc1-f9a1-4188-9555-bddf483426a1" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 756.758468] env[63345]: DEBUG nova.network.neutron [req-6244e69c-605e-4aeb-b001-9b40fee315e3 req-0390fc45-91ce-479e-b122-9a87883a36d1 service nova] [instance: 93112cc1-f9a1-4188-9555-bddf483426a1] Refreshing network info cache for port b8d03603-203a-4bdb-ac34-d490f6f611b2 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 756.866666] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60debd8d-a8df-4fc4-bca6-55d0db4ee185 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.876302] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e008e3a0-27cb-4891-8da2-49a5deb718a0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.910302] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5153d443-39a5-4a2f-a54e-066bd6b1d075 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.920182] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba47058b-5415-4de9-86fd-cb64dc04a59f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.935418] env[63345]: DEBUG nova.compute.provider_tree [None req-46d06c0e-8898-48ae-8f40-b29c17280d4b tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 757.042788] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 757.078219] env[63345]: DEBUG nova.network.neutron [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 
e3d52cbd-e768-4425-b83e-180a6e58fd00] Successfully updated port: b14c7ab5-3080-4f38-a677-17206714df35 {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 757.260628] env[63345]: INFO nova.compute.manager [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 778faa4f-4c5f-4ec2-b17b-5d7513c9c218] Rebuilding instance [ 757.310098] env[63345]: DEBUG nova.network.neutron [req-6244e69c-605e-4aeb-b001-9b40fee315e3 req-0390fc45-91ce-479e-b122-9a87883a36d1 service nova] [instance: 93112cc1-f9a1-4188-9555-bddf483426a1] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 757.337061] env[63345]: DEBUG nova.compute.manager [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 778faa4f-4c5f-4ec2-b17b-5d7513c9c218] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 757.337870] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d3361c4-4703-4340-97ab-14f1a45d2045 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.349768] env[63345]: DEBUG nova.compute.manager [req-4efa6473-7301-480a-abf6-5b3a569e1d11 req-c71ad56e-da9f-4d42-a6db-1fa382bc5a4d service nova] [instance: 93112cc1-f9a1-4188-9555-bddf483426a1] Received event network-vif-plugged-e5fd2647-d313-4830-9b9d-0722b78abadb {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 757.350018] env[63345]: DEBUG oslo_concurrency.lockutils [req-4efa6473-7301-480a-abf6-5b3a569e1d11 req-c71ad56e-da9f-4d42-a6db-1fa382bc5a4d service nova] Acquiring lock "93112cc1-f9a1-4188-9555-bddf483426a1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 757.350250] env[63345]: DEBUG oslo_concurrency.lockutils [req-4efa6473-7301-480a-abf6-5b3a569e1d11 req-c71ad56e-da9f-4d42-a6db-1fa382bc5a4d service nova] Lock "93112cc1-f9a1-4188-9555-bddf483426a1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 757.350456] env[63345]: DEBUG oslo_concurrency.lockutils [req-4efa6473-7301-480a-abf6-5b3a569e1d11 req-c71ad56e-da9f-4d42-a6db-1fa382bc5a4d service nova] Lock "93112cc1-f9a1-4188-9555-bddf483426a1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 757.350673] env[63345]: DEBUG nova.compute.manager [req-4efa6473-7301-480a-abf6-5b3a569e1d11 req-c71ad56e-da9f-4d42-a6db-1fa382bc5a4d service nova] [instance: 93112cc1-f9a1-4188-9555-bddf483426a1] No waiting events found dispatching network-vif-plugged-e5fd2647-d313-4830-9b9d-0722b78abadb {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 757.350869] env[63345]: WARNING nova.compute.manager [req-4efa6473-7301-480a-abf6-5b3a569e1d11 req-c71ad56e-da9f-4d42-a6db-1fa382bc5a4d service nova] [instance: 93112cc1-f9a1-4188-9555-bddf483426a1] Received unexpected 
event network-vif-plugged-e5fd2647-d313-4830-9b9d-0722b78abadb for instance with vm_state building and task_state spawning. [ 757.427681] env[63345]: DEBUG nova.network.neutron [req-6244e69c-605e-4aeb-b001-9b40fee315e3 req-0390fc45-91ce-479e-b122-9a87883a36d1 service nova] [instance: 93112cc1-f9a1-4188-9555-bddf483426a1] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 757.469610] env[63345]: DEBUG nova.scheduler.client.report [None req-46d06c0e-8898-48ae-8f40-b29c17280d4b tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Updated inventory for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with generation 79 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 757.469756] env[63345]: DEBUG nova.compute.provider_tree [None req-46d06c0e-8898-48ae-8f40-b29c17280d4b tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Updating resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 generation from 79 to 80 during operation: update_inventory {{(pid=63345) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 757.469896] env[63345]: DEBUG nova.compute.provider_tree [None req-46d06c0e-8898-48ae-8f40-b29c17280d4b tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 757.581326] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquiring lock "refresh_cache-e3d52cbd-e768-4425-b83e-180a6e58fd00" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 757.581522] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquired lock "refresh_cache-e3d52cbd-e768-4425-b83e-180a6e58fd00" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 757.581623] env[63345]: DEBUG nova.network.neutron [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: e3d52cbd-e768-4425-b83e-180a6e58fd00] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 757.721684] env[63345]: DEBUG nova.network.neutron 
[None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] [instance: 93112cc1-f9a1-4188-9555-bddf483426a1] Successfully updated port: e5fd2647-d313-4830-9b9d-0722b78abadb {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 757.749220] env[63345]: DEBUG nova.compute.manager [req-a7fc72fb-4095-40a2-a463-f56bca3ee2bb req-a5796da0-98f2-4fe8-aec4-aecc1b5b04c6 service nova] [instance: 93112cc1-f9a1-4188-9555-bddf483426a1] Received event network-changed-e5fd2647-d313-4830-9b9d-0722b78abadb {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 757.749421] env[63345]: DEBUG nova.compute.manager [req-a7fc72fb-4095-40a2-a463-f56bca3ee2bb req-a5796da0-98f2-4fe8-aec4-aecc1b5b04c6 service nova] [instance: 93112cc1-f9a1-4188-9555-bddf483426a1] Refreshing instance network info cache due to event network-changed-e5fd2647-d313-4830-9b9d-0722b78abadb. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 757.749623] env[63345]: DEBUG oslo_concurrency.lockutils [req-a7fc72fb-4095-40a2-a463-f56bca3ee2bb req-a5796da0-98f2-4fe8-aec4-aecc1b5b04c6 service nova] Acquiring lock "refresh_cache-93112cc1-f9a1-4188-9555-bddf483426a1" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 757.932059] env[63345]: DEBUG oslo_concurrency.lockutils [req-6244e69c-605e-4aeb-b001-9b40fee315e3 req-0390fc45-91ce-479e-b122-9a87883a36d1 service nova] Releasing lock "refresh_cache-93112cc1-f9a1-4188-9555-bddf483426a1" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 757.932469] env[63345]: DEBUG oslo_concurrency.lockutils [req-a7fc72fb-4095-40a2-a463-f56bca3ee2bb req-a5796da0-98f2-4fe8-aec4-aecc1b5b04c6 service nova] Acquired lock "refresh_cache-93112cc1-f9a1-4188-9555-bddf483426a1" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 757.932469] env[63345]: DEBUG nova.network.neutron [req-a7fc72fb-4095-40a2-a463-f56bca3ee2bb req-a5796da0-98f2-4fe8-aec4-aecc1b5b04c6 service nova] [instance: 93112cc1-f9a1-4188-9555-bddf483426a1] Refreshing network info cache for port e5fd2647-d313-4830-9b9d-0722b78abadb {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 757.975272] env[63345]: DEBUG oslo_concurrency.lockutils [None req-46d06c0e-8898-48ae-8f40-b29c17280d4b tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.394s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 757.977618] env[63345]: DEBUG oslo_concurrency.lockutils [None req-77605977-b262-459e-a8bb-b5d33eecfcbe tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.134s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 757.977903] env[63345]: DEBUG nova.objects.instance [None req-77605977-b262-459e-a8bb-b5d33eecfcbe tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Lazy-loading 'resources' on Instance uuid 34e0234c-36c4-4878-979b-46f045bd1785 
{{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 757.995496] env[63345]: INFO nova.scheduler.client.report [None req-46d06c0e-8898-48ae-8f40-b29c17280d4b tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Deleted allocations for instance 78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c [ 758.135084] env[63345]: DEBUG nova.network.neutron [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: e3d52cbd-e768-4425-b83e-180a6e58fd00] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 758.225538] env[63345]: DEBUG oslo_concurrency.lockutils [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Acquiring lock "refresh_cache-93112cc1-f9a1-4188-9555-bddf483426a1" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 758.355414] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 778faa4f-4c5f-4ec2-b17b-5d7513c9c218] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 758.356518] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7b32cca0-bbcf-4c57-ac45-f83ec3058bbd {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.363882] env[63345]: DEBUG oslo_vmware.api [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Waiting for the task: (returnval){ [ 758.363882] env[63345]: value = "task-1016979" [ 758.363882] env[63345]: _type = "Task" [ 758.363882] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.375392] env[63345]: DEBUG oslo_vmware.api [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1016979, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.439900] env[63345]: DEBUG nova.network.neutron [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: e3d52cbd-e768-4425-b83e-180a6e58fd00] Updating instance_info_cache with network_info: [{"id": "b14c7ab5-3080-4f38-a677-17206714df35", "address": "fa:16:3e:3e:08:ff", "network": {"id": "f05df594-fc76-4e2d-b29b-6942fee8dc99", "bridge": "br-int", "label": "tempest-ServersTestJSON-241206779-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63d7b3facae6416989f763e610cf98f7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb14c7ab5-30", "ovs_interfaceid": "b14c7ab5-3080-4f38-a677-17206714df35", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 758.476530] env[63345]: DEBUG nova.network.neutron [req-a7fc72fb-4095-40a2-a463-f56bca3ee2bb req-a5796da0-98f2-4fe8-aec4-aecc1b5b04c6 service nova] [instance: 93112cc1-f9a1-4188-9555-bddf483426a1] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 758.512318] env[63345]: DEBUG oslo_concurrency.lockutils [None req-46d06c0e-8898-48ae-8f40-b29c17280d4b tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Lock "78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.051s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 758.603753] env[63345]: DEBUG nova.network.neutron [req-a7fc72fb-4095-40a2-a463-f56bca3ee2bb req-a5796da0-98f2-4fe8-aec4-aecc1b5b04c6 service nova] [instance: 93112cc1-f9a1-4188-9555-bddf483426a1] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 758.779144] env[63345]: DEBUG nova.compute.manager [req-505fbf71-b883-4608-bd33-bb1acbcac5fe req-e5cf1af8-c8d0-4160-9d77-683b665a1c19 service nova] [instance: e3d52cbd-e768-4425-b83e-180a6e58fd00] Received event network-vif-plugged-b14c7ab5-3080-4f38-a677-17206714df35 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 758.779359] env[63345]: DEBUG oslo_concurrency.lockutils [req-505fbf71-b883-4608-bd33-bb1acbcac5fe req-e5cf1af8-c8d0-4160-9d77-683b665a1c19 service nova] Acquiring lock "e3d52cbd-e768-4425-b83e-180a6e58fd00-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 758.779608] env[63345]: DEBUG oslo_concurrency.lockutils [req-505fbf71-b883-4608-bd33-bb1acbcac5fe req-e5cf1af8-c8d0-4160-9d77-683b665a1c19 service nova] Lock "e3d52cbd-e768-4425-b83e-180a6e58fd00-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 758.779787] env[63345]: DEBUG oslo_concurrency.lockutils [req-505fbf71-b883-4608-bd33-bb1acbcac5fe req-e5cf1af8-c8d0-4160-9d77-683b665a1c19 service nova] Lock "e3d52cbd-e768-4425-b83e-180a6e58fd00-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 758.779956] env[63345]: DEBUG nova.compute.manager [req-505fbf71-b883-4608-bd33-bb1acbcac5fe req-e5cf1af8-c8d0-4160-9d77-683b665a1c19 service nova] [instance: e3d52cbd-e768-4425-b83e-180a6e58fd00] No waiting events found dispatching network-vif-plugged-b14c7ab5-3080-4f38-a677-17206714df35 {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 758.780145] env[63345]: WARNING nova.compute.manager [req-505fbf71-b883-4608-bd33-bb1acbcac5fe req-e5cf1af8-c8d0-4160-9d77-683b665a1c19 service nova] [instance: e3d52cbd-e768-4425-b83e-180a6e58fd00] Received unexpected event network-vif-plugged-b14c7ab5-3080-4f38-a677-17206714df35 for instance with vm_state building and task_state spawning. 
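The entries above show nova-compute's external-event plumbing at work: Neutron sends network-vif-plugged / network-changed notifications for ports b8d03603-..., e5fd2647-... and b14c7ab5-..., and the compute manager either hands each event to a waiter registered by the spawning thread or, when nobody is waiting yet, emits the "No waiting events found dispatching ..." line followed by the "Received unexpected event ... for instance with vm_state building and task_state spawning" warning seen here. The sketch below is a minimal stand-in for that dispatch pattern using only the Python standard library; the class and method names (InstanceEventWaiter, prepare, dispatch, wait) are illustrative assumptions, not Nova's actual implementation.

    # Minimal, illustrative sketch of the external-event dispatch pattern visible in
    # the log above: a build thread registers interest in an event such as
    # "network-vif-plugged-<port>" before plugging the VIF, and the event handler
    # either wakes that waiter or logs the "Received unexpected event" warning.
    # All names here are hypothetical, standard-library-only stand-ins.
    import logging
    import threading

    LOG = logging.getLogger(__name__)


    class InstanceEventWaiter:
        """Tracks per-instance events that a build thread is waiting on."""

        def __init__(self):
            self._lock = threading.Lock()
            # (instance_uuid, event_name) -> threading.Event
            self._waiters = {}

        def prepare(self, instance_uuid, event_name):
            """Register interest in an event before triggering the action."""
            waiter = threading.Event()
            with self._lock:
                self._waiters[(instance_uuid, event_name)] = waiter
            return waiter

        def dispatch(self, instance_uuid, event_name):
            """Handle an external notification (e.g. from Neutron)."""
            with self._lock:
                waiter = self._waiters.pop((instance_uuid, event_name), None)
            if waiter is None:
                # Corresponds to the WARNING lines above: nobody was waiting yet.
                LOG.warning("Received unexpected event %s for instance %s",
                            event_name, instance_uuid)
                return False
            waiter.set()
            return True

        def wait(self, waiter, timeout=300):
            """Block the build thread until the event arrives or times out."""
            return waiter.wait(timeout)


    if __name__ == "__main__":
        logging.basicConfig(level=logging.INFO)
        events = InstanceEventWaiter()
        # Event arriving before anyone registered -> warning, as in the log.
        events.dispatch("93112cc1-f9a1-4188-9555-bddf483426a1",
                        "network-vif-plugged-b8d03603")
        # Normal path: register first, dispatch, then wait returns immediately.
        w = events.prepare("e3d52cbd-e768-4425-b83e-180a6e58fd00",
                           "network-vif-plugged-b14c7ab5")
        events.dispatch("e3d52cbd-e768-4425-b83e-180a6e58fd00",
                        "network-vif-plugged-b14c7ab5")
        print("event received:", events.wait(w, timeout=1))

In the trace itself the warnings appear harmless: the "No waiting events found dispatching ..." lines show the notifications simply arrived without a registered waiter, and the adjacent "Successfully updated port" entries confirm the port plug completed.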
[ 758.780332] env[63345]: DEBUG nova.compute.manager [req-505fbf71-b883-4608-bd33-bb1acbcac5fe req-e5cf1af8-c8d0-4160-9d77-683b665a1c19 service nova] [instance: e3d52cbd-e768-4425-b83e-180a6e58fd00] Received event network-changed-b14c7ab5-3080-4f38-a677-17206714df35 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 758.780516] env[63345]: DEBUG nova.compute.manager [req-505fbf71-b883-4608-bd33-bb1acbcac5fe req-e5cf1af8-c8d0-4160-9d77-683b665a1c19 service nova] [instance: e3d52cbd-e768-4425-b83e-180a6e58fd00] Refreshing instance network info cache due to event network-changed-b14c7ab5-3080-4f38-a677-17206714df35. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 758.780691] env[63345]: DEBUG oslo_concurrency.lockutils [req-505fbf71-b883-4608-bd33-bb1acbcac5fe req-e5cf1af8-c8d0-4160-9d77-683b665a1c19 service nova] Acquiring lock "refresh_cache-e3d52cbd-e768-4425-b83e-180a6e58fd00" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 758.877018] env[63345]: DEBUG oslo_vmware.api [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1016979, 'name': PowerOffVM_Task, 'duration_secs': 0.259087} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.877332] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 778faa4f-4c5f-4ec2-b17b-5d7513c9c218] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 758.877700] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 778faa4f-4c5f-4ec2-b17b-5d7513c9c218] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 758.878382] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7021d01-073e-40ff-8ff3-774084c9577b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.885493] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 778faa4f-4c5f-4ec2-b17b-5d7513c9c218] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 758.888040] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8f728377-7941-43d7-82db-f405be6356f0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.939505] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc8f0def-6cb1-4dac-8381-41ef9be87f64 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.942881] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] 
Releasing lock "refresh_cache-e3d52cbd-e768-4425-b83e-180a6e58fd00" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 758.943236] env[63345]: DEBUG nova.compute.manager [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: e3d52cbd-e768-4425-b83e-180a6e58fd00] Instance network_info: |[{"id": "b14c7ab5-3080-4f38-a677-17206714df35", "address": "fa:16:3e:3e:08:ff", "network": {"id": "f05df594-fc76-4e2d-b29b-6942fee8dc99", "bridge": "br-int", "label": "tempest-ServersTestJSON-241206779-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63d7b3facae6416989f763e610cf98f7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb14c7ab5-30", "ovs_interfaceid": "b14c7ab5-3080-4f38-a677-17206714df35", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 758.943557] env[63345]: DEBUG oslo_concurrency.lockutils [req-505fbf71-b883-4608-bd33-bb1acbcac5fe req-e5cf1af8-c8d0-4160-9d77-683b665a1c19 service nova] Acquired lock "refresh_cache-e3d52cbd-e768-4425-b83e-180a6e58fd00" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 758.943750] env[63345]: DEBUG nova.network.neutron [req-505fbf71-b883-4608-bd33-bb1acbcac5fe req-e5cf1af8-c8d0-4160-9d77-683b665a1c19 service nova] [instance: e3d52cbd-e768-4425-b83e-180a6e58fd00] Refreshing network info cache for port b14c7ab5-3080-4f38-a677-17206714df35 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 758.945048] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: e3d52cbd-e768-4425-b83e-180a6e58fd00] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3e:08:ff', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7874ee7f-20c7-4bd8-a750-ed489e9acc65', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b14c7ab5-3080-4f38-a677-17206714df35', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 758.953419] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Creating folder: Project (63d7b3facae6416989f763e610cf98f7). Parent ref: group-v225918. 
{{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 758.957755] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5e4e417b-1b9c-43b3-a1dc-7fe729b3fbf3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.959488] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 778faa4f-4c5f-4ec2-b17b-5d7513c9c218] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 758.959694] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 778faa4f-4c5f-4ec2-b17b-5d7513c9c218] Deleting contents of the VM from datastore datastore1 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 758.959882] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Deleting the datastore file [datastore1] 778faa4f-4c5f-4ec2-b17b-5d7513c9c218 {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 758.962379] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5f18bf57-1e4a-4a1f-bd25-78ee221409b5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.968019] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7394045-99cd-4abb-853a-6c556379c016 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.974248] env[63345]: DEBUG oslo_vmware.api [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Waiting for the task: (returnval){ [ 758.974248] env[63345]: value = "task-1016982" [ 758.974248] env[63345]: _type = "Task" [ 758.974248] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.974772] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Created folder: Project (63d7b3facae6416989f763e610cf98f7) in parent group-v225918. [ 758.975109] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Creating folder: Instances. Parent ref: group-v226017. 
{{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 759.003917] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-60491903-d954-41d4-8a2c-6ff50606df94 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.013153] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-316b505d-0106-49f7-9779-d4f53ed89376 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.022463] env[63345]: DEBUG oslo_vmware.api [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1016982, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.026650] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dcdb405-7a83-4756-a804-213ceca07342 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.030784] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Created folder: Instances in parent group-v226017. [ 759.030902] env[63345]: DEBUG oslo.service.loopingcall [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 759.031850] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e3d52cbd-e768-4425-b83e-180a6e58fd00] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 759.031850] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6a6bbe21-01ac-4d90-9042-f47bf31a7510 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.056981] env[63345]: DEBUG nova.compute.provider_tree [None req-77605977-b262-459e-a8bb-b5d33eecfcbe tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 759.063508] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 759.063508] env[63345]: value = "task-1016984" [ 759.063508] env[63345]: _type = "Task" [ 759.063508] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.071303] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016984, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.107442] env[63345]: DEBUG oslo_concurrency.lockutils [req-a7fc72fb-4095-40a2-a463-f56bca3ee2bb req-a5796da0-98f2-4fe8-aec4-aecc1b5b04c6 service nova] Releasing lock "refresh_cache-93112cc1-f9a1-4188-9555-bddf483426a1" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 759.107949] env[63345]: DEBUG oslo_concurrency.lockutils [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Acquired lock "refresh_cache-93112cc1-f9a1-4188-9555-bddf483426a1" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 759.108050] env[63345]: DEBUG nova.network.neutron [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] [instance: 93112cc1-f9a1-4188-9555-bddf483426a1] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 759.215109] env[63345]: DEBUG nova.network.neutron [req-505fbf71-b883-4608-bd33-bb1acbcac5fe req-e5cf1af8-c8d0-4160-9d77-683b665a1c19 service nova] [instance: e3d52cbd-e768-4425-b83e-180a6e58fd00] Updated VIF entry in instance network info cache for port b14c7ab5-3080-4f38-a677-17206714df35. {{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 759.215109] env[63345]: DEBUG nova.network.neutron [req-505fbf71-b883-4608-bd33-bb1acbcac5fe req-e5cf1af8-c8d0-4160-9d77-683b665a1c19 service nova] [instance: e3d52cbd-e768-4425-b83e-180a6e58fd00] Updating instance_info_cache with network_info: [{"id": "b14c7ab5-3080-4f38-a677-17206714df35", "address": "fa:16:3e:3e:08:ff", "network": {"id": "f05df594-fc76-4e2d-b29b-6942fee8dc99", "bridge": "br-int", "label": "tempest-ServersTestJSON-241206779-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63d7b3facae6416989f763e610cf98f7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb14c7ab5-30", "ovs_interfaceid": "b14c7ab5-3080-4f38-a677-17206714df35", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 759.513396] env[63345]: DEBUG oslo_vmware.api [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1016982, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.194632} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.513705] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 759.513962] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 778faa4f-4c5f-4ec2-b17b-5d7513c9c218] Deleted contents of the VM from datastore datastore1 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 759.514233] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 778faa4f-4c5f-4ec2-b17b-5d7513c9c218] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 759.562551] env[63345]: DEBUG nova.scheduler.client.report [None req-77605977-b262-459e-a8bb-b5d33eecfcbe tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 759.576656] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016984, 'name': CreateVM_Task, 'duration_secs': 0.448802} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.576904] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e3d52cbd-e768-4425-b83e-180a6e58fd00] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 759.577718] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 759.577986] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 759.578311] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 759.578600] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3adfaa5c-fc48-4288-a76b-87b352ce284d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.584168] env[63345]: DEBUG oslo_vmware.api [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Waiting for the task: (returnval){ [ 759.584168] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]5227e4e1-db82-520a-0734-c23aa661762b" [ 759.584168] env[63345]: _type = "Task" [ 759.584168] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.594021] env[63345]: DEBUG oslo_vmware.api [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5227e4e1-db82-520a-0734-c23aa661762b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.633071] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a81329fb-cc18-4913-bbf9-8c001583ad88 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Acquiring lock "46d3332a-bfb9-4812-8201-a87467ce5151" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 759.633408] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a81329fb-cc18-4913-bbf9-8c001583ad88 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Lock "46d3332a-bfb9-4812-8201-a87467ce5151" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 759.633651] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a81329fb-cc18-4913-bbf9-8c001583ad88 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Acquiring lock "46d3332a-bfb9-4812-8201-a87467ce5151-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 759.633865] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a81329fb-cc18-4913-bbf9-8c001583ad88 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Lock "46d3332a-bfb9-4812-8201-a87467ce5151-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 759.634061] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a81329fb-cc18-4913-bbf9-8c001583ad88 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Lock "46d3332a-bfb9-4812-8201-a87467ce5151-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 759.636381] env[63345]: INFO nova.compute.manager [None req-a81329fb-cc18-4913-bbf9-8c001583ad88 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] [instance: 46d3332a-bfb9-4812-8201-a87467ce5151] Terminating instance [ 759.652015] env[63345]: DEBUG nova.network.neutron [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] [instance: 93112cc1-f9a1-4188-9555-bddf483426a1] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 759.718394] env[63345]: DEBUG oslo_concurrency.lockutils [req-505fbf71-b883-4608-bd33-bb1acbcac5fe req-e5cf1af8-c8d0-4160-9d77-683b665a1c19 service nova] Releasing lock "refresh_cache-e3d52cbd-e768-4425-b83e-180a6e58fd00" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 759.960706] env[63345]: DEBUG nova.network.neutron [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] [instance: 93112cc1-f9a1-4188-9555-bddf483426a1] Updating instance_info_cache with network_info: [{"id": "b8d03603-203a-4bdb-ac34-d490f6f611b2", "address": "fa:16:3e:a7:2d:46", "network": {"id": "8f47abc5-e642-4441-94a8-79deff38af77", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-595220153", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.36", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4919485c7e184230b38e703f7ce8a047", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b151a0c-aa46-4d21-9ef5-c09cf350b19c", "external-id": "nsx-vlan-transportzone-343", "segmentation_id": 343, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8d03603-20", "ovs_interfaceid": "b8d03603-203a-4bdb-ac34-d490f6f611b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e5fd2647-d313-4830-9b9d-0722b78abadb", "address": "fa:16:3e:5d:e2:c3", "network": {"id": "3fa39fff-d12b-4643-8c05-f173230d2734", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1176319450", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.188", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "4919485c7e184230b38e703f7ce8a047", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19671de9-8b5b-4710-adc3-7419f3c0f171", "external-id": "nsx-vlan-transportzone-421", "segmentation_id": 421, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape5fd2647-d3", "ovs_interfaceid": "e5fd2647-d313-4830-9b9d-0722b78abadb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 760.068625] env[63345]: DEBUG oslo_concurrency.lockutils [None req-77605977-b262-459e-a8bb-b5d33eecfcbe tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.090s {{(pid=63345) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 760.070997] env[63345]: DEBUG oslo_concurrency.lockutils [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.229s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 760.072338] env[63345]: INFO nova.compute.claims [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 760.095171] env[63345]: DEBUG oslo_vmware.api [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5227e4e1-db82-520a-0734-c23aa661762b, 'name': SearchDatastore_Task, 'duration_secs': 0.018668} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.096211] env[63345]: INFO nova.scheduler.client.report [None req-77605977-b262-459e-a8bb-b5d33eecfcbe tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Deleted allocations for instance 34e0234c-36c4-4878-979b-46f045bd1785 [ 760.098095] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 760.098373] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: e3d52cbd-e768-4425-b83e-180a6e58fd00] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 760.098793] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 760.098956] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 760.099165] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 760.101862] env[63345]: DEBUG 
oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a0f4938d-fc19-48b5-8a6c-fea43eb138e1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.111609] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 760.111834] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 760.112614] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1fbe5178-fad4-4a62-9c10-ef06d03423dc {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.118891] env[63345]: DEBUG oslo_vmware.api [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Waiting for the task: (returnval){ [ 760.118891] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52d4c6c4-b80d-9dd1-7920-d7849e0e27b1" [ 760.118891] env[63345]: _type = "Task" [ 760.118891] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.130384] env[63345]: DEBUG oslo_vmware.api [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52d4c6c4-b80d-9dd1-7920-d7849e0e27b1, 'name': SearchDatastore_Task, 'duration_secs': 0.008348} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.131206] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-78a507a2-2799-4fba-b7b5-a2d9ad20a99a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.136312] env[63345]: DEBUG oslo_vmware.api [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Waiting for the task: (returnval){ [ 760.136312] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]523d6b16-7c5e-537d-b49f-3acce7e554c0" [ 760.136312] env[63345]: _type = "Task" [ 760.136312] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.140850] env[63345]: DEBUG nova.compute.manager [None req-a81329fb-cc18-4913-bbf9-8c001583ad88 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] [instance: 46d3332a-bfb9-4812-8201-a87467ce5151] Start destroying the instance on the hypervisor. 
{{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 760.141092] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a81329fb-cc18-4913-bbf9-8c001583ad88 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] [instance: 46d3332a-bfb9-4812-8201-a87467ce5151] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 760.141936] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b59b75c4-11b0-483a-b8c8-2b6d4b1e59a5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.150237] env[63345]: DEBUG oslo_vmware.api [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]523d6b16-7c5e-537d-b49f-3acce7e554c0, 'name': SearchDatastore_Task, 'duration_secs': 0.009105} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.152507] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 760.152799] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] e3d52cbd-e768-4425-b83e-180a6e58fd00/e3d52cbd-e768-4425-b83e-180a6e58fd00.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 760.153390] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-a81329fb-cc18-4913-bbf9-8c001583ad88 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] [instance: 46d3332a-bfb9-4812-8201-a87467ce5151] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 760.153635] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ed3e4f5f-b205-4bdd-b96b-7baa00a9cc88 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.158063] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ab84a513-94d3-4405-a7f9-380442dd574b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.162047] env[63345]: DEBUG oslo_vmware.api [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Waiting for the task: (returnval){ [ 760.162047] env[63345]: value = "task-1016985" [ 760.162047] env[63345]: _type = "Task" [ 760.162047] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.163959] env[63345]: DEBUG oslo_vmware.api [None req-a81329fb-cc18-4913-bbf9-8c001583ad88 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Waiting for the task: (returnval){ [ 760.163959] env[63345]: value = "task-1016986" [ 760.163959] env[63345]: _type = "Task" [ 760.163959] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.176680] env[63345]: DEBUG oslo_vmware.api [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1016985, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.180374] env[63345]: DEBUG oslo_vmware.api [None req-a81329fb-cc18-4913-bbf9-8c001583ad88 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Task: {'id': task-1016986, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.463912] env[63345]: DEBUG oslo_concurrency.lockutils [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Releasing lock "refresh_cache-93112cc1-f9a1-4188-9555-bddf483426a1" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 760.464254] env[63345]: DEBUG nova.compute.manager [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] [instance: 93112cc1-f9a1-4188-9555-bddf483426a1] Instance network_info: |[{"id": "b8d03603-203a-4bdb-ac34-d490f6f611b2", "address": "fa:16:3e:a7:2d:46", "network": {"id": "8f47abc5-e642-4441-94a8-79deff38af77", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-595220153", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.36", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4919485c7e184230b38e703f7ce8a047", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b151a0c-aa46-4d21-9ef5-c09cf350b19c", "external-id": "nsx-vlan-transportzone-343", "segmentation_id": 343, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8d03603-20", "ovs_interfaceid": "b8d03603-203a-4bdb-ac34-d490f6f611b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e5fd2647-d313-4830-9b9d-0722b78abadb", "address": "fa:16:3e:5d:e2:c3", "network": {"id": "3fa39fff-d12b-4643-8c05-f173230d2734", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1176319450", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.188", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], 
"version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "4919485c7e184230b38e703f7ce8a047", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19671de9-8b5b-4710-adc3-7419f3c0f171", "external-id": "nsx-vlan-transportzone-421", "segmentation_id": 421, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape5fd2647-d3", "ovs_interfaceid": "e5fd2647-d313-4830-9b9d-0722b78abadb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 760.464813] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] [instance: 93112cc1-f9a1-4188-9555-bddf483426a1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a7:2d:46', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6b151a0c-aa46-4d21-9ef5-c09cf350b19c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b8d03603-203a-4bdb-ac34-d490f6f611b2', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:5d:e2:c3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '19671de9-8b5b-4710-adc3-7419f3c0f171', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e5fd2647-d313-4830-9b9d-0722b78abadb', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 760.480137] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Creating folder: Project (4919485c7e184230b38e703f7ce8a047). Parent ref: group-v225918. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 760.480137] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-515c68d5-40e7-4b79-87c6-bca6b86fa209 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.489485] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Created folder: Project (4919485c7e184230b38e703f7ce8a047) in parent group-v225918. [ 760.490164] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Creating folder: Instances. Parent ref: group-v226020. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 760.490760] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-54ca0476-404b-4e42-b61f-18bd9055af39 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.504969] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Created folder: Instances in parent group-v226020. 
[ 760.506021] env[63345]: DEBUG oslo.service.loopingcall [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 760.506583] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 93112cc1-f9a1-4188-9555-bddf483426a1] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 760.507149] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c95ff13c-febd-408b-9d60-ea2b70283806 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.536569] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 760.536569] env[63345]: value = "task-1016989" [ 760.536569] env[63345]: _type = "Task" [ 760.536569] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.547567] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016989, 'name': CreateVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.550147] env[63345]: DEBUG nova.virt.hardware [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 760.550415] env[63345]: DEBUG nova.virt.hardware [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 760.550610] env[63345]: DEBUG nova.virt.hardware [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 760.550812] env[63345]: DEBUG nova.virt.hardware [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 760.550967] env[63345]: DEBUG nova.virt.hardware [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 
tempest-ServerDiskConfigTestJSON-2090373809-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 760.551139] env[63345]: DEBUG nova.virt.hardware [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 760.551358] env[63345]: DEBUG nova.virt.hardware [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 760.551531] env[63345]: DEBUG nova.virt.hardware [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 760.551706] env[63345]: DEBUG nova.virt.hardware [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 760.551877] env[63345]: DEBUG nova.virt.hardware [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 760.552074] env[63345]: DEBUG nova.virt.hardware [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 760.552934] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61f81add-c350-4c04-b178-f2c6d67761ea {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.560753] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1ce78db-063a-48a3-b034-a1be342e6ea8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.582335] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 778faa4f-4c5f-4ec2-b17b-5d7513c9c218] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e5:8a:ad', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ddfb706a-add1-4e16-9ac4-d20b16a1df6d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cdecc5d9-4e4d-421b-80ac-a8ae91e31e7e', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 760.589935] 
env[63345]: DEBUG oslo.service.loopingcall [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 760.591191] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 778faa4f-4c5f-4ec2-b17b-5d7513c9c218] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 760.591449] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-40eca205-f288-4c1f-9fae-91f47eef651f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.609551] env[63345]: DEBUG oslo_concurrency.lockutils [None req-77605977-b262-459e-a8bb-b5d33eecfcbe tempest-MultipleCreateTestJSON-712956919 tempest-MultipleCreateTestJSON-712956919-project-member] Lock "34e0234c-36c4-4878-979b-46f045bd1785" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.062s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 760.616418] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 760.616418] env[63345]: value = "task-1016990" [ 760.616418] env[63345]: _type = "Task" [ 760.616418] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.628570] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016990, 'name': CreateVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.679275] env[63345]: DEBUG oslo_vmware.api [None req-a81329fb-cc18-4913-bbf9-8c001583ad88 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Task: {'id': task-1016986, 'name': PowerOffVM_Task, 'duration_secs': 0.256961} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.682828] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-a81329fb-cc18-4913-bbf9-8c001583ad88 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] [instance: 46d3332a-bfb9-4812-8201-a87467ce5151] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 760.683099] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a81329fb-cc18-4913-bbf9-8c001583ad88 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] [instance: 46d3332a-bfb9-4812-8201-a87467ce5151] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 760.683379] env[63345]: DEBUG oslo_vmware.api [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1016985, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.683956] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-630c8574-fea8-4292-a8a9-9702d824f6aa {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.765035] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a81329fb-cc18-4913-bbf9-8c001583ad88 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] [instance: 46d3332a-bfb9-4812-8201-a87467ce5151] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 760.765168] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a81329fb-cc18-4913-bbf9-8c001583ad88 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] [instance: 46d3332a-bfb9-4812-8201-a87467ce5151] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 760.765397] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-a81329fb-cc18-4913-bbf9-8c001583ad88 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Deleting the datastore file [datastore2] 46d3332a-bfb9-4812-8201-a87467ce5151 {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 760.765680] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-855c2423-a5a6-446c-878a-ef388f64f198 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.773090] env[63345]: DEBUG oslo_vmware.api [None req-a81329fb-cc18-4913-bbf9-8c001583ad88 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Waiting for the task: (returnval){ [ 760.773090] env[63345]: value = "task-1016992" [ 760.773090] env[63345]: _type = "Task" [ 760.773090] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.782652] env[63345]: DEBUG oslo_vmware.api [None req-a81329fb-cc18-4913-bbf9-8c001583ad88 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Task: {'id': task-1016992, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.048110] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016989, 'name': CreateVM_Task} progress is 99%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.126317] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016990, 'name': CreateVM_Task, 'duration_secs': 0.502643} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.126492] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 778faa4f-4c5f-4ec2-b17b-5d7513c9c218] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 761.127266] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 761.127443] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 761.127771] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 761.130678] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-64406698-6b76-49bb-b406-743648793e20 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.135617] env[63345]: DEBUG oslo_vmware.api [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Waiting for the task: (returnval){ [ 761.135617] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]527bd19c-a927-e96e-99d0-e429d03c5e60" [ 761.135617] env[63345]: _type = "Task" [ 761.135617] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.147515] env[63345]: DEBUG oslo_vmware.api [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]527bd19c-a927-e96e-99d0-e429d03c5e60, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.176746] env[63345]: DEBUG oslo_vmware.api [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1016985, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.561088} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.179415] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] e3d52cbd-e768-4425-b83e-180a6e58fd00/e3d52cbd-e768-4425-b83e-180a6e58fd00.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 761.179672] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: e3d52cbd-e768-4425-b83e-180a6e58fd00] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 761.180120] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-51a1d3b3-183a-4056-aa74-10d22766bb0e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.187461] env[63345]: DEBUG oslo_vmware.api [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Waiting for the task: (returnval){ [ 761.187461] env[63345]: value = "task-1016993" [ 761.187461] env[63345]: _type = "Task" [ 761.187461] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.200354] env[63345]: DEBUG oslo_vmware.api [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1016993, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.286485] env[63345]: DEBUG oslo_vmware.api [None req-a81329fb-cc18-4913-bbf9-8c001583ad88 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Task: {'id': task-1016992, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.217102} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.286751] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-a81329fb-cc18-4913-bbf9-8c001583ad88 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 761.286939] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a81329fb-cc18-4913-bbf9-8c001583ad88 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] [instance: 46d3332a-bfb9-4812-8201-a87467ce5151] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 761.287141] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a81329fb-cc18-4913-bbf9-8c001583ad88 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] [instance: 46d3332a-bfb9-4812-8201-a87467ce5151] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 761.287325] env[63345]: INFO nova.compute.manager [None req-a81329fb-cc18-4913-bbf9-8c001583ad88 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] [instance: 46d3332a-bfb9-4812-8201-a87467ce5151] Took 1.15 seconds to destroy the instance on the hypervisor. [ 761.287705] env[63345]: DEBUG oslo.service.loopingcall [None req-a81329fb-cc18-4913-bbf9-8c001583ad88 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 761.287779] env[63345]: DEBUG nova.compute.manager [-] [instance: 46d3332a-bfb9-4812-8201-a87467ce5151] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 761.287842] env[63345]: DEBUG nova.network.neutron [-] [instance: 46d3332a-bfb9-4812-8201-a87467ce5151] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 761.522094] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da086f2f-344a-4807-8ca5-1f4972108cc2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.535312] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c528af05-2d9a-4f84-93d0-7ed0056f07be {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.548781] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1016989, 'name': CreateVM_Task, 'duration_secs': 0.554569} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.579025] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 93112cc1-f9a1-4188-9555-bddf483426a1] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 761.579025] env[63345]: DEBUG oslo_concurrency.lockutils [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 761.579025] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d750b2ea-0060-45da-aff5-26c90e615473 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.586191] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8b58fc9-8e6a-4bc5-861a-c5460244cd6d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.601500] env[63345]: DEBUG nova.compute.provider_tree [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 761.649383] env[63345]: DEBUG oslo_vmware.api [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]527bd19c-a927-e96e-99d0-e429d03c5e60, 'name': SearchDatastore_Task, 'duration_secs': 0.010878} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.649647] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 761.649888] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 778faa4f-4c5f-4ec2-b17b-5d7513c9c218] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 761.650163] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 761.650298] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 761.650520] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 761.650833] env[63345]: DEBUG oslo_concurrency.lockutils [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 761.651157] env[63345]: DEBUG oslo_concurrency.lockutils [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 761.652116] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ee92a92b-c908-42c0-801b-0891baafe95a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.655856] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9ef67d4f-7eae-41ab-a9ae-79a43f408099 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.666995] env[63345]: DEBUG oslo_vmware.api 
[None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Waiting for the task: (returnval){ [ 761.666995] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]520348be-12db-3589-ea09-d1360dc9d0d4" [ 761.666995] env[63345]: _type = "Task" [ 761.666995] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.669484] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 761.669843] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 761.676101] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7fdbf853-cab6-4d6f-ae11-95aceae60c7b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.692698] env[63345]: DEBUG oslo_vmware.api [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]520348be-12db-3589-ea09-d1360dc9d0d4, 'name': SearchDatastore_Task, 'duration_secs': 0.012165} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.698097] env[63345]: DEBUG oslo_concurrency.lockutils [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 761.698561] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] [instance: 93112cc1-f9a1-4188-9555-bddf483426a1] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 761.698603] env[63345]: DEBUG oslo_concurrency.lockutils [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 761.698867] env[63345]: DEBUG oslo_vmware.api [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Waiting for the task: (returnval){ [ 761.698867] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52a05e78-7200-e52a-46a7-70a984774fe2" [ 761.698867] env[63345]: _type = "Task" [ 761.698867] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.707257] env[63345]: DEBUG oslo_vmware.api [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1016993, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073856} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.708252] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: e3d52cbd-e768-4425-b83e-180a6e58fd00] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 761.710074] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-719a3249-d03a-4842-beb5-220f9d491124 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.716212] env[63345]: DEBUG oslo_vmware.api [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52a05e78-7200-e52a-46a7-70a984774fe2, 'name': SearchDatastore_Task, 'duration_secs': 0.011588} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.717658] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-31a7a80b-80d7-47ee-ac5d-940cb29a78bb {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.740585] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: e3d52cbd-e768-4425-b83e-180a6e58fd00] Reconfiguring VM instance instance-00000037 to attach disk [datastore2] e3d52cbd-e768-4425-b83e-180a6e58fd00/e3d52cbd-e768-4425-b83e-180a6e58fd00.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 761.741308] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-684567a1-e97b-4883-b277-0f4e12a84470 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.759127] env[63345]: DEBUG oslo_vmware.api [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Waiting for the task: (returnval){ [ 761.759127] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]5201e875-adef-bb3b-b22a-755009065da0" [ 761.759127] env[63345]: _type = "Task" [ 761.759127] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.766386] env[63345]: DEBUG oslo_vmware.api [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Waiting for the task: (returnval){ [ 761.766386] env[63345]: value = "task-1016994" [ 761.766386] env[63345]: _type = "Task" [ 761.766386] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.772028] env[63345]: DEBUG oslo_vmware.api [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5201e875-adef-bb3b-b22a-755009065da0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.782636] env[63345]: DEBUG oslo_vmware.api [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1016994, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.916225] env[63345]: DEBUG nova.compute.manager [req-b93f5385-e055-47b5-9f18-84c1ee35a03b req-f0afce61-cf3c-4ed5-b736-6eba42e66a4e service nova] [instance: 46d3332a-bfb9-4812-8201-a87467ce5151] Received event network-vif-deleted-6fb848af-6632-4cdf-847d-138fe30c4a08 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 761.916359] env[63345]: INFO nova.compute.manager [req-b93f5385-e055-47b5-9f18-84c1ee35a03b req-f0afce61-cf3c-4ed5-b736-6eba42e66a4e service nova] [instance: 46d3332a-bfb9-4812-8201-a87467ce5151] Neutron deleted interface 6fb848af-6632-4cdf-847d-138fe30c4a08; detaching it from the instance and deleting it from the info cache [ 761.916552] env[63345]: DEBUG nova.network.neutron [req-b93f5385-e055-47b5-9f18-84c1ee35a03b req-f0afce61-cf3c-4ed5-b736-6eba42e66a4e service nova] [instance: 46d3332a-bfb9-4812-8201-a87467ce5151] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 762.141593] env[63345]: DEBUG nova.scheduler.client.report [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Updated inventory for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with generation 80 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 762.141923] env[63345]: DEBUG nova.compute.provider_tree [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Updating resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 generation from 80 to 81 during operation: update_inventory {{(pid=63345) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 762.142124] env[63345]: DEBUG nova.compute.provider_tree [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 762.273207] env[63345]: DEBUG oslo_vmware.api [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5201e875-adef-bb3b-b22a-755009065da0, 'name': SearchDatastore_Task, 'duration_secs': 0.029602} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.273930] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 762.274228] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 778faa4f-4c5f-4ec2-b17b-5d7513c9c218/778faa4f-4c5f-4ec2-b17b-5d7513c9c218.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 762.274525] env[63345]: DEBUG oslo_concurrency.lockutils [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 762.274720] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 762.274938] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ac371206-b66a-4c07-b5f8-ce5fafad5e39 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.279869] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e7a0b7cb-fcba-4bed-a483-b0f029166398 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.281745] env[63345]: DEBUG oslo_vmware.api [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1016994, 'name': ReconfigVM_Task, 'duration_secs': 0.317513} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.282364] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: e3d52cbd-e768-4425-b83e-180a6e58fd00] Reconfigured VM instance instance-00000037 to attach disk [datastore2] e3d52cbd-e768-4425-b83e-180a6e58fd00/e3d52cbd-e768-4425-b83e-180a6e58fd00.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 762.283322] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4c891463-342f-409c-8a75-40c6a24f6e08 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.287028] env[63345]: DEBUG oslo_vmware.api [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Waiting for the task: (returnval){ [ 762.287028] env[63345]: value = "task-1016995" [ 762.287028] env[63345]: _type = "Task" [ 762.287028] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.291789] env[63345]: DEBUG oslo_vmware.api [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Waiting for the task: (returnval){ [ 762.291789] env[63345]: value = "task-1016996" [ 762.291789] env[63345]: _type = "Task" [ 762.291789] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.292406] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 762.292578] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 762.293662] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-77196610-dfda-497d-b774-6035e88f17bc {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.301636] env[63345]: DEBUG oslo_vmware.api [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1016995, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.304724] env[63345]: DEBUG oslo_vmware.api [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Waiting for the task: (returnval){ [ 762.304724] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52aa7191-b122-44d5-2ffc-397fbaf9618d" [ 762.304724] env[63345]: _type = "Task" [ 762.304724] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.307886] env[63345]: DEBUG oslo_vmware.api [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1016996, 'name': Rename_Task} progress is 10%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.315950] env[63345]: DEBUG oslo_vmware.api [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52aa7191-b122-44d5-2ffc-397fbaf9618d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.342526] env[63345]: DEBUG nova.network.neutron [-] [instance: 46d3332a-bfb9-4812-8201-a87467ce5151] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 762.422556] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-77a5661b-f07d-4c62-9dcc-a920a5ae2335 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.435311] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10fc0111-d22b-4c47-8393-b6b6c33277c4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.488462] env[63345]: DEBUG nova.compute.manager [req-b93f5385-e055-47b5-9f18-84c1ee35a03b req-f0afce61-cf3c-4ed5-b736-6eba42e66a4e service nova] [instance: 46d3332a-bfb9-4812-8201-a87467ce5151] Detach interface failed, port_id=6fb848af-6632-4cdf-847d-138fe30c4a08, reason: Instance 46d3332a-bfb9-4812-8201-a87467ce5151 could not be found. {{(pid=63345) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 762.647431] env[63345]: DEBUG oslo_concurrency.lockutils [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.577s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 762.648270] env[63345]: DEBUG nova.compute.manager [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Start building networks asynchronously for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 762.652038] env[63345]: DEBUG oslo_concurrency.lockutils [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.650s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 762.653964] env[63345]: INFO nova.compute.claims [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: b5173471-3367-42ba-b450-62ad8573f048] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 762.800121] env[63345]: DEBUG oslo_vmware.api [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1016995, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.809848] env[63345]: DEBUG oslo_vmware.api [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1016996, 'name': Rename_Task, 'duration_secs': 0.166182} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.818132] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: e3d52cbd-e768-4425-b83e-180a6e58fd00] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 762.818557] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a2de943c-a305-490d-9b9c-2a0069001095 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.827735] env[63345]: DEBUG oslo_vmware.api [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52aa7191-b122-44d5-2ffc-397fbaf9618d, 'name': SearchDatastore_Task, 'duration_secs': 0.012215} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.828962] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4460a4a4-e4fd-4cb3-8814-06c6ad5bb610 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.834708] env[63345]: DEBUG oslo_vmware.api [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Waiting for the task: (returnval){ [ 762.834708] env[63345]: value = "task-1016997" [ 762.834708] env[63345]: _type = "Task" [ 762.834708] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.840256] env[63345]: DEBUG oslo_vmware.api [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Waiting for the task: (returnval){ [ 762.840256] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52d8dd65-57a6-c14e-7713-afafc0848c98" [ 762.840256] env[63345]: _type = "Task" [ 762.840256] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.847437] env[63345]: INFO nova.compute.manager [-] [instance: 46d3332a-bfb9-4812-8201-a87467ce5151] Took 1.56 seconds to deallocate network for instance. [ 762.847801] env[63345]: DEBUG oslo_vmware.api [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1016997, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.853680] env[63345]: DEBUG oslo_vmware.api [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52d8dd65-57a6-c14e-7713-afafc0848c98, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.164236] env[63345]: DEBUG nova.compute.utils [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 763.166375] env[63345]: DEBUG nova.compute.manager [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Allocating IP information in the background. 
{{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 763.166574] env[63345]: DEBUG nova.network.neutron [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 763.225529] env[63345]: DEBUG nova.policy [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f522b76a59a649a0a8570a4e8b8da753', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2b389a73e7804452b23d8c00bedd0362', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 763.299448] env[63345]: DEBUG oslo_vmware.api [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1016995, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.647507} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.299818] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 778faa4f-4c5f-4ec2-b17b-5d7513c9c218/778faa4f-4c5f-4ec2-b17b-5d7513c9c218.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 763.300063] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 778faa4f-4c5f-4ec2-b17b-5d7513c9c218] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 763.300358] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3e53178e-e77d-424c-ab0d-400ce8b429c5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.307292] env[63345]: DEBUG oslo_vmware.api [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Waiting for the task: (returnval){ [ 763.307292] env[63345]: value = "task-1016998" [ 763.307292] env[63345]: _type = "Task" [ 763.307292] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.315666] env[63345]: DEBUG oslo_vmware.api [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1016998, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.348238] env[63345]: DEBUG oslo_vmware.api [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1016997, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.353797] env[63345]: DEBUG oslo_vmware.api [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52d8dd65-57a6-c14e-7713-afafc0848c98, 'name': SearchDatastore_Task, 'duration_secs': 0.056773} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.354067] env[63345]: DEBUG oslo_concurrency.lockutils [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 763.354333] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 93112cc1-f9a1-4188-9555-bddf483426a1/93112cc1-f9a1-4188-9555-bddf483426a1.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 763.354636] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-01ba609b-1349-47f1-99bf-9162985dc415 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.362060] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a81329fb-cc18-4913-bbf9-8c001583ad88 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 763.362383] env[63345]: DEBUG oslo_vmware.api [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Waiting for the task: (returnval){ [ 763.362383] env[63345]: value = "task-1016999" [ 763.362383] env[63345]: _type = "Task" [ 763.362383] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.372179] env[63345]: DEBUG oslo_vmware.api [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Task: {'id': task-1016999, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.503115] env[63345]: DEBUG nova.network.neutron [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Successfully created port: a44ad561-3547-45fd-a941-c72ff5211989 {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 763.671174] env[63345]: DEBUG nova.compute.manager [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Start building block device mappings for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 763.822050] env[63345]: DEBUG oslo_vmware.api [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1016998, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.211829} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.822366] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 778faa4f-4c5f-4ec2-b17b-5d7513c9c218] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 763.823344] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7158ab6e-b513-4579-a3a3-56b595fb9721 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.851990] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 778faa4f-4c5f-4ec2-b17b-5d7513c9c218] Reconfiguring VM instance instance-00000035 to attach disk [datastore2] 778faa4f-4c5f-4ec2-b17b-5d7513c9c218/778faa4f-4c5f-4ec2-b17b-5d7513c9c218.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 763.859853] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1ef2302b-7553-4394-941c-b23555d90e73 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.883813] env[63345]: DEBUG oslo_vmware.api [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1016997, 'name': PowerOnVM_Task, 'duration_secs': 0.870589} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.885826] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: e3d52cbd-e768-4425-b83e-180a6e58fd00] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 763.886023] env[63345]: INFO nova.compute.manager [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: e3d52cbd-e768-4425-b83e-180a6e58fd00] Took 7.27 seconds to spawn the instance on the hypervisor. [ 763.886306] env[63345]: DEBUG nova.compute.manager [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: e3d52cbd-e768-4425-b83e-180a6e58fd00] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 763.886651] env[63345]: DEBUG oslo_vmware.api [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Waiting for the task: (returnval){ [ 763.886651] env[63345]: value = "task-1017000" [ 763.886651] env[63345]: _type = "Task" [ 763.886651] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.887631] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ba666d3-3308-42e2-a1a7-284cd09577ff {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.900786] env[63345]: DEBUG oslo_vmware.api [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Task: {'id': task-1016999, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.913479] env[63345]: DEBUG oslo_vmware.api [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017000, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.162649] env[63345]: DEBUG oslo_vmware.rw_handles [None req-58b0f415-d91e-4f00-be28-2ecc3e050fdb tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5250a575-72fb-19e5-6687-e54f1cb00fa1/disk-0.vmdk. {{(pid=63345) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 764.163622] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1aacb734-b6eb-4505-8bae-ec7af57e0188 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.170049] env[63345]: DEBUG oslo_vmware.rw_handles [None req-58b0f415-d91e-4f00-be28-2ecc3e050fdb tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5250a575-72fb-19e5-6687-e54f1cb00fa1/disk-0.vmdk is in state: ready. 
{{(pid=63345) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 764.170049] env[63345]: ERROR oslo_vmware.rw_handles [None req-58b0f415-d91e-4f00-be28-2ecc3e050fdb tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5250a575-72fb-19e5-6687-e54f1cb00fa1/disk-0.vmdk due to incomplete transfer. [ 764.172517] env[63345]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-32a61c39-4420-428a-8eef-e080d47d076b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.182372] env[63345]: DEBUG oslo_vmware.rw_handles [None req-58b0f415-d91e-4f00-be28-2ecc3e050fdb tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5250a575-72fb-19e5-6687-e54f1cb00fa1/disk-0.vmdk. {{(pid=63345) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 764.182623] env[63345]: DEBUG nova.virt.vmwareapi.images [None req-58b0f415-d91e-4f00-be28-2ecc3e050fdb tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 6cbe136b-5bf6-4f17-bcef-b712d850615f] Uploaded image f0cb364f-cc7f-4213-88ce-b8773612e90e to the Glance image server {{(pid=63345) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:473}} [ 764.184833] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-58b0f415-d91e-4f00-be28-2ecc3e050fdb tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 6cbe136b-5bf6-4f17-bcef-b712d850615f] Destroying the VM {{(pid=63345) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 764.185349] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-256cb61d-ff11-4b8e-8f64-f839ef3fd16a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.189147] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b3077c3-eb16-4587-80c2-c7e1473573f4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.192769] env[63345]: DEBUG oslo_vmware.api [None req-58b0f415-d91e-4f00-be28-2ecc3e050fdb tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Waiting for the task: (returnval){ [ 764.192769] env[63345]: value = "task-1017001" [ 764.192769] env[63345]: _type = "Task" [ 764.192769] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.199108] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c1d9c34-cc19-4226-aaca-ce15c9616560 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.207345] env[63345]: DEBUG oslo_vmware.api [None req-58b0f415-d91e-4f00-be28-2ecc3e050fdb tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1017001, 'name': Destroy_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.236852] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2e30805-2891-443b-8333-de136ca4c1c9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.246663] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-446eb837-afd1-4024-ae6a-87f061ec9e7b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.261726] env[63345]: DEBUG nova.compute.provider_tree [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 764.383962] env[63345]: DEBUG oslo_vmware.api [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Task: {'id': task-1016999, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.678027} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.384517] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 93112cc1-f9a1-4188-9555-bddf483426a1/93112cc1-f9a1-4188-9555-bddf483426a1.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 764.384517] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] [instance: 93112cc1-f9a1-4188-9555-bddf483426a1] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 764.384744] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5d8547e6-9894-4df3-9738-2be7f327b669 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.391324] env[63345]: DEBUG oslo_vmware.api [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Waiting for the task: (returnval){ [ 764.391324] env[63345]: value = "task-1017002" [ 764.391324] env[63345]: _type = "Task" [ 764.391324] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.402636] env[63345]: DEBUG oslo_vmware.api [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017000, 'name': ReconfigVM_Task, 'duration_secs': 0.386278} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.405968] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 778faa4f-4c5f-4ec2-b17b-5d7513c9c218] Reconfigured VM instance instance-00000035 to attach disk [datastore2] 778faa4f-4c5f-4ec2-b17b-5d7513c9c218/778faa4f-4c5f-4ec2-b17b-5d7513c9c218.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 764.406684] env[63345]: DEBUG oslo_vmware.api [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Task: {'id': task-1017002, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.406944] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e3d795c2-6c6b-494a-8213-c346a0e731fa {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.412566] env[63345]: DEBUG oslo_vmware.api [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Waiting for the task: (returnval){ [ 764.412566] env[63345]: value = "task-1017003" [ 764.412566] env[63345]: _type = "Task" [ 764.412566] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.424613] env[63345]: DEBUG oslo_vmware.api [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017003, 'name': Rename_Task} progress is 6%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.426973] env[63345]: INFO nova.compute.manager [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: e3d52cbd-e768-4425-b83e-180a6e58fd00] Took 36.98 seconds to build instance. [ 764.686873] env[63345]: DEBUG nova.compute.manager [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Start spawning the instance on the hypervisor. {{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 764.703332] env[63345]: DEBUG oslo_vmware.api [None req-58b0f415-d91e-4f00-be28-2ecc3e050fdb tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1017001, 'name': Destroy_Task, 'duration_secs': 0.426872} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.703524] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-58b0f415-d91e-4f00-be28-2ecc3e050fdb tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 6cbe136b-5bf6-4f17-bcef-b712d850615f] Destroyed the VM [ 764.704364] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-58b0f415-d91e-4f00-be28-2ecc3e050fdb tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 6cbe136b-5bf6-4f17-bcef-b712d850615f] Deleting Snapshot of the VM instance {{(pid=63345) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 764.704364] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-53d3ce6d-8dd6-4941-b82a-665b45d71e8d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.711082] env[63345]: DEBUG oslo_vmware.api [None req-58b0f415-d91e-4f00-be28-2ecc3e050fdb tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Waiting for the task: (returnval){ [ 764.711082] env[63345]: value = "task-1017004" [ 764.711082] env[63345]: _type = "Task" [ 764.711082] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.719019] env[63345]: DEBUG nova.virt.hardware [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 764.719019] env[63345]: DEBUG nova.virt.hardware [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 764.719019] env[63345]: DEBUG nova.virt.hardware [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 764.719019] env[63345]: DEBUG nova.virt.hardware [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 764.719019] env[63345]: DEBUG nova.virt.hardware [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 
tempest-ServersAdminTestJSON-620918024-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 764.719019] env[63345]: DEBUG nova.virt.hardware [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 764.719019] env[63345]: DEBUG nova.virt.hardware [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 764.719473] env[63345]: DEBUG nova.virt.hardware [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 764.719473] env[63345]: DEBUG nova.virt.hardware [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 764.719571] env[63345]: DEBUG nova.virt.hardware [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 764.719799] env[63345]: DEBUG nova.virt.hardware [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 764.721456] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e9bb685-4655-4bc4-9338-3017f334b5fa {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.730358] env[63345]: DEBUG oslo_vmware.api [None req-58b0f415-d91e-4f00-be28-2ecc3e050fdb tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1017004, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.734217] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b538bcb4-c67e-42ee-ad39-2b5350d68808 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.764547] env[63345]: DEBUG nova.scheduler.client.report [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 764.903821] env[63345]: DEBUG oslo_vmware.api [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Task: {'id': task-1017002, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.173993} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.904242] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] [instance: 93112cc1-f9a1-4188-9555-bddf483426a1] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 764.905144] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7686d77-3203-45a0-8f98-510288df218f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.938739] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] [instance: 93112cc1-f9a1-4188-9555-bddf483426a1] Reconfiguring VM instance instance-00000036 to attach disk [datastore2] 93112cc1-f9a1-4188-9555-bddf483426a1/93112cc1-f9a1-4188-9555-bddf483426a1.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 764.943564] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ba170b2b-131c-4bb4-bfa3-151cedee3545 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Lock "e3d52cbd-e768-4425-b83e-180a6e58fd00" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 119.613s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 764.944073] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-be1e70a1-af02-47b8-b6e9-9f1698775464 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.969017] env[63345]: DEBUG oslo_vmware.api [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 
tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017003, 'name': Rename_Task, 'duration_secs': 0.174066} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.970818] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 778faa4f-4c5f-4ec2-b17b-5d7513c9c218] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 764.971250] env[63345]: DEBUG oslo_vmware.api [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Waiting for the task: (returnval){ [ 764.971250] env[63345]: value = "task-1017005" [ 764.971250] env[63345]: _type = "Task" [ 764.971250] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.971462] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6319d8b2-c29c-4df2-9587-e90cef3702f1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.982087] env[63345]: DEBUG oslo_vmware.api [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Task: {'id': task-1017005, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.983361] env[63345]: DEBUG oslo_vmware.api [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Waiting for the task: (returnval){ [ 764.983361] env[63345]: value = "task-1017006" [ 764.983361] env[63345]: _type = "Task" [ 764.983361] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.994256] env[63345]: DEBUG oslo_vmware.api [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017006, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.059465] env[63345]: DEBUG nova.compute.manager [req-e4c26da7-b5f2-4768-8276-7bd455947271 req-60e56ce6-15c9-476c-8425-19aff79c1486 service nova] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Received event network-vif-plugged-a44ad561-3547-45fd-a941-c72ff5211989 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 765.059850] env[63345]: DEBUG oslo_concurrency.lockutils [req-e4c26da7-b5f2-4768-8276-7bd455947271 req-60e56ce6-15c9-476c-8425-19aff79c1486 service nova] Acquiring lock "3a85df04-3997-48a3-8992-f24fe997b3cc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 765.061366] env[63345]: DEBUG oslo_concurrency.lockutils [req-e4c26da7-b5f2-4768-8276-7bd455947271 req-60e56ce6-15c9-476c-8425-19aff79c1486 service nova] Lock "3a85df04-3997-48a3-8992-f24fe997b3cc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 765.061366] env[63345]: DEBUG oslo_concurrency.lockutils [req-e4c26da7-b5f2-4768-8276-7bd455947271 req-60e56ce6-15c9-476c-8425-19aff79c1486 service nova] Lock "3a85df04-3997-48a3-8992-f24fe997b3cc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 765.061366] env[63345]: DEBUG nova.compute.manager [req-e4c26da7-b5f2-4768-8276-7bd455947271 req-60e56ce6-15c9-476c-8425-19aff79c1486 service nova] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] No waiting events found dispatching network-vif-plugged-a44ad561-3547-45fd-a941-c72ff5211989 {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 765.061366] env[63345]: WARNING nova.compute.manager [req-e4c26da7-b5f2-4768-8276-7bd455947271 req-60e56ce6-15c9-476c-8425-19aff79c1486 service nova] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Received unexpected event network-vif-plugged-a44ad561-3547-45fd-a941-c72ff5211989 for instance with vm_state building and task_state spawning. 
[ 765.128045] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6ec4ae4a-1862-4345-933b-1093499b3269 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquiring lock "e3d52cbd-e768-4425-b83e-180a6e58fd00" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 765.128045] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6ec4ae4a-1862-4345-933b-1093499b3269 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Lock "e3d52cbd-e768-4425-b83e-180a6e58fd00" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 765.128045] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6ec4ae4a-1862-4345-933b-1093499b3269 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquiring lock "e3d52cbd-e768-4425-b83e-180a6e58fd00-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 765.128045] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6ec4ae4a-1862-4345-933b-1093499b3269 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Lock "e3d52cbd-e768-4425-b83e-180a6e58fd00-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 765.128045] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6ec4ae4a-1862-4345-933b-1093499b3269 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Lock "e3d52cbd-e768-4425-b83e-180a6e58fd00-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 765.131187] env[63345]: INFO nova.compute.manager [None req-6ec4ae4a-1862-4345-933b-1093499b3269 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: e3d52cbd-e768-4425-b83e-180a6e58fd00] Terminating instance [ 765.220718] env[63345]: DEBUG oslo_vmware.api [None req-58b0f415-d91e-4f00-be28-2ecc3e050fdb tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1017004, 'name': RemoveSnapshot_Task} progress is 97%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.270285] env[63345]: DEBUG oslo_concurrency.lockutils [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.618s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 765.270662] env[63345]: DEBUG nova.compute.manager [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: b5173471-3367-42ba-b450-62ad8573f048] Start building networks asynchronously for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 765.273686] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 27.771s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 765.273907] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 765.275911] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63345) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 765.276327] env[63345]: DEBUG oslo_concurrency.lockutils [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.080s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 765.279368] env[63345]: INFO nova.compute.claims [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: bcec23fe-75c7-479e-9210-85ca6781d7e5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 765.290024] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0d15d08-3ec5-40f5-991e-d7534328917e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.304149] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb0f9a5f-b8cf-4457-b963-7b3d479c7f64 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.319424] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7948997c-5d33-4807-b4c5-372103968574 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.328112] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63fb5e82-a81c-4edc-8cfe-aafda1672286 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.363216] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180066MB free_disk=186GB free_vcpus=48 pci_devices=None {{(pid=63345) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 765.363402] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" 
{{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 765.483601] env[63345]: DEBUG oslo_vmware.api [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Task: {'id': task-1017005, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.492744] env[63345]: DEBUG oslo_vmware.api [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017006, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.637883] env[63345]: DEBUG nova.compute.manager [None req-6ec4ae4a-1862-4345-933b-1093499b3269 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: e3d52cbd-e768-4425-b83e-180a6e58fd00] Start destroying the instance on the hypervisor. {{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 765.638263] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-6ec4ae4a-1862-4345-933b-1093499b3269 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: e3d52cbd-e768-4425-b83e-180a6e58fd00] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 765.639283] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d1233c1-7ff2-4d8f-80d2-068bf738835d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.649294] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ec4ae4a-1862-4345-933b-1093499b3269 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: e3d52cbd-e768-4425-b83e-180a6e58fd00] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 765.649294] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-00496a3e-64dd-41fa-97cd-162414c5f2c1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.655357] env[63345]: DEBUG oslo_vmware.api [None req-6ec4ae4a-1862-4345-933b-1093499b3269 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Waiting for the task: (returnval){ [ 765.655357] env[63345]: value = "task-1017007" [ 765.655357] env[63345]: _type = "Task" [ 765.655357] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.659214] env[63345]: DEBUG nova.network.neutron [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Successfully updated port: a44ad561-3547-45fd-a941-c72ff5211989 {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 765.666070] env[63345]: DEBUG oslo_vmware.api [None req-6ec4ae4a-1862-4345-933b-1093499b3269 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017007, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.720932] env[63345]: DEBUG nova.compute.manager [req-aa1ea885-10b1-4fa4-8f72-16014bf34e1c req-604ff5ad-a54d-4ad5-a992-bd9c057a860a service nova] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Received event network-changed-a44ad561-3547-45fd-a941-c72ff5211989 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 765.721278] env[63345]: DEBUG nova.compute.manager [req-aa1ea885-10b1-4fa4-8f72-16014bf34e1c req-604ff5ad-a54d-4ad5-a992-bd9c057a860a service nova] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Refreshing instance network info cache due to event network-changed-a44ad561-3547-45fd-a941-c72ff5211989. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 765.721751] env[63345]: DEBUG oslo_concurrency.lockutils [req-aa1ea885-10b1-4fa4-8f72-16014bf34e1c req-604ff5ad-a54d-4ad5-a992-bd9c057a860a service nova] Acquiring lock "refresh_cache-3a85df04-3997-48a3-8992-f24fe997b3cc" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 765.722475] env[63345]: DEBUG oslo_concurrency.lockutils [req-aa1ea885-10b1-4fa4-8f72-16014bf34e1c req-604ff5ad-a54d-4ad5-a992-bd9c057a860a service nova] Acquired lock "refresh_cache-3a85df04-3997-48a3-8992-f24fe997b3cc" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 765.722475] env[63345]: DEBUG nova.network.neutron [req-aa1ea885-10b1-4fa4-8f72-16014bf34e1c req-604ff5ad-a54d-4ad5-a992-bd9c057a860a service nova] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Refreshing network info cache for port a44ad561-3547-45fd-a941-c72ff5211989 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 765.730589] env[63345]: DEBUG oslo_vmware.api [None req-58b0f415-d91e-4f00-be28-2ecc3e050fdb tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1017004, 'name': RemoveSnapshot_Task, 'duration_secs': 0.726236} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.731811] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-58b0f415-d91e-4f00-be28-2ecc3e050fdb tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 6cbe136b-5bf6-4f17-bcef-b712d850615f] Deleted Snapshot of the VM instance {{(pid=63345) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 765.731811] env[63345]: INFO nova.compute.manager [None req-58b0f415-d91e-4f00-be28-2ecc3e050fdb tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 6cbe136b-5bf6-4f17-bcef-b712d850615f] Took 15.63 seconds to snapshot the instance on the hypervisor. [ 765.790247] env[63345]: DEBUG nova.compute.utils [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 765.794256] env[63345]: DEBUG nova.compute.manager [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: b5173471-3367-42ba-b450-62ad8573f048] Allocating IP information in the background. 
{{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 765.794256] env[63345]: DEBUG nova.network.neutron [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: b5173471-3367-42ba-b450-62ad8573f048] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 765.859175] env[63345]: DEBUG nova.policy [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f522b76a59a649a0a8570a4e8b8da753', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2b389a73e7804452b23d8c00bedd0362', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 765.986852] env[63345]: DEBUG oslo_vmware.api [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Task: {'id': task-1017005, 'name': ReconfigVM_Task, 'duration_secs': 0.845114} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.992796] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] [instance: 93112cc1-f9a1-4188-9555-bddf483426a1] Reconfigured VM instance instance-00000036 to attach disk [datastore2] 93112cc1-f9a1-4188-9555-bddf483426a1/93112cc1-f9a1-4188-9555-bddf483426a1.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 765.993240] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-02d151b9-5777-4aab-ac97-16edf7004628 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.001985] env[63345]: DEBUG oslo_vmware.api [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017006, 'name': PowerOnVM_Task, 'duration_secs': 0.52896} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.003708] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 778faa4f-4c5f-4ec2-b17b-5d7513c9c218] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 766.003948] env[63345]: DEBUG nova.compute.manager [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 778faa4f-4c5f-4ec2-b17b-5d7513c9c218] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 766.004546] env[63345]: DEBUG oslo_vmware.api [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Waiting for the task: (returnval){ [ 766.004546] env[63345]: value = "task-1017008" [ 766.004546] env[63345]: _type = "Task" [ 766.004546] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.005762] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-988f0d26-2e88-4392-b067-298fad53fad8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.021225] env[63345]: DEBUG oslo_vmware.api [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Task: {'id': task-1017008, 'name': Rename_Task} progress is 6%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.161893] env[63345]: DEBUG oslo_concurrency.lockutils [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Acquiring lock "refresh_cache-3a85df04-3997-48a3-8992-f24fe997b3cc" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 766.172248] env[63345]: DEBUG oslo_vmware.api [None req-6ec4ae4a-1862-4345-933b-1093499b3269 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017007, 'name': PowerOffVM_Task, 'duration_secs': 0.351143} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.172248] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ec4ae4a-1862-4345-933b-1093499b3269 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: e3d52cbd-e768-4425-b83e-180a6e58fd00] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 766.173066] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-6ec4ae4a-1862-4345-933b-1093499b3269 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: e3d52cbd-e768-4425-b83e-180a6e58fd00] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 766.173442] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f255bac4-89a2-461b-a6cd-936e65135872 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.260698] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-6ec4ae4a-1862-4345-933b-1093499b3269 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: e3d52cbd-e768-4425-b83e-180a6e58fd00] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 766.261203] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-6ec4ae4a-1862-4345-933b-1093499b3269 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: e3d52cbd-e768-4425-b83e-180a6e58fd00] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 766.261303] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-6ec4ae4a-1862-4345-933b-1093499b3269 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Deleting the datastore file [datastore2] e3d52cbd-e768-4425-b83e-180a6e58fd00 {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 766.261599] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-61da5e32-34ee-42e4-a853-bb0646453888 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.273929] env[63345]: DEBUG oslo_vmware.api [None req-6ec4ae4a-1862-4345-933b-1093499b3269 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Waiting for the task: (returnval){ [ 766.273929] env[63345]: value = "task-1017010" [ 766.273929] env[63345]: _type = "Task" [ 766.273929] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.327414] env[63345]: DEBUG nova.network.neutron [req-aa1ea885-10b1-4fa4-8f72-16014bf34e1c req-604ff5ad-a54d-4ad5-a992-bd9c057a860a service nova] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 766.327414] env[63345]: DEBUG nova.compute.manager [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: b5173471-3367-42ba-b450-62ad8573f048] Start building block device mappings for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 766.327414] env[63345]: DEBUG oslo_vmware.api [None req-6ec4ae4a-1862-4345-933b-1093499b3269 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017010, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.351392] env[63345]: DEBUG nova.network.neutron [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: b5173471-3367-42ba-b450-62ad8573f048] Successfully created port: 9dc2d1aa-5968-48ba-9b48-23f87e1e9419 {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 766.454126] env[63345]: DEBUG nova.network.neutron [req-aa1ea885-10b1-4fa4-8f72-16014bf34e1c req-604ff5ad-a54d-4ad5-a992-bd9c057a860a service nova] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 766.472647] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Acquiring lock "4f108dcc-c130-4c3f-840d-7a912150db3f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 766.472800] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Lock "4f108dcc-c130-4c3f-840d-7a912150db3f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 766.527182] env[63345]: DEBUG oslo_vmware.api [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Task: {'id': task-1017008, 'name': Rename_Task, 'duration_secs': 0.211406} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.536866] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] [instance: 93112cc1-f9a1-4188-9555-bddf483426a1] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 766.537341] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 766.537578] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d801be9c-3d1a-4a6b-ba48-9f608e44c93f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.544333] env[63345]: DEBUG oslo_vmware.api [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Waiting for the task: (returnval){ [ 766.544333] env[63345]: value = "task-1017011" [ 766.544333] env[63345]: _type = "Task" [ 766.544333] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.556387] env[63345]: DEBUG oslo_vmware.api [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Task: {'id': task-1017011, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.792059] env[63345]: DEBUG oslo_vmware.api [None req-6ec4ae4a-1862-4345-933b-1093499b3269 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017010, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.147682} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.792059] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-6ec4ae4a-1862-4345-933b-1093499b3269 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 766.792059] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-6ec4ae4a-1862-4345-933b-1093499b3269 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: e3d52cbd-e768-4425-b83e-180a6e58fd00] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 766.792059] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-6ec4ae4a-1862-4345-933b-1093499b3269 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: e3d52cbd-e768-4425-b83e-180a6e58fd00] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 766.792059] env[63345]: INFO nova.compute.manager [None req-6ec4ae4a-1862-4345-933b-1093499b3269 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: e3d52cbd-e768-4425-b83e-180a6e58fd00] Took 1.15 seconds to destroy the instance on the hypervisor. [ 766.792059] env[63345]: DEBUG oslo.service.loopingcall [None req-6ec4ae4a-1862-4345-933b-1093499b3269 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 766.792059] env[63345]: DEBUG nova.compute.manager [-] [instance: e3d52cbd-e768-4425-b83e-180a6e58fd00] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 766.792059] env[63345]: DEBUG nova.network.neutron [-] [instance: e3d52cbd-e768-4425-b83e-180a6e58fd00] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 766.875354] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d54f98fa-07fb-4348-9f39-2776989082fb {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.883157] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-196dd018-47e7-4159-806c-f2e30ea6ad56 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.918521] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2f99456-151d-4f86-8996-f96cd14bc35a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.930689] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6641f74-9055-4c2b-a1c0-c5b1ca06eb14 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.947250] env[63345]: DEBUG nova.compute.provider_tree [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Inventory 
has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 766.959776] env[63345]: DEBUG oslo_concurrency.lockutils [req-aa1ea885-10b1-4fa4-8f72-16014bf34e1c req-604ff5ad-a54d-4ad5-a992-bd9c057a860a service nova] Releasing lock "refresh_cache-3a85df04-3997-48a3-8992-f24fe997b3cc" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 766.960153] env[63345]: DEBUG oslo_concurrency.lockutils [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Acquired lock "refresh_cache-3a85df04-3997-48a3-8992-f24fe997b3cc" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 766.960759] env[63345]: DEBUG nova.network.neutron [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 766.975653] env[63345]: DEBUG nova.compute.manager [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 4f108dcc-c130-4c3f-840d-7a912150db3f] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 767.055982] env[63345]: DEBUG oslo_vmware.api [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Task: {'id': task-1017011, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.308603] env[63345]: DEBUG nova.compute.manager [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: b5173471-3367-42ba-b450-62ad8573f048] Start spawning the instance on the hypervisor. 
{{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 767.341220] env[63345]: DEBUG nova.virt.hardware [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 767.341616] env[63345]: DEBUG nova.virt.hardware [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 767.341811] env[63345]: DEBUG nova.virt.hardware [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 767.342019] env[63345]: DEBUG nova.virt.hardware [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 767.342185] env[63345]: DEBUG nova.virt.hardware [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 767.342342] env[63345]: DEBUG nova.virt.hardware [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 767.342554] env[63345]: DEBUG nova.virt.hardware [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 767.342722] env[63345]: DEBUG nova.virt.hardware [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 767.342897] env[63345]: DEBUG nova.virt.hardware [None req-e75905f3-55af-4c3c-a98f-c1470127c787 
tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 767.343074] env[63345]: DEBUG nova.virt.hardware [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 767.343258] env[63345]: DEBUG nova.virt.hardware [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 767.344873] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc8dac16-d53c-49c0-a172-895b3f75ba13 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.357911] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00bce8e1-3812-460f-b9d1-5fbece9aaf21 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.428300] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7d7b1dc2-9273-4a08-b3a4-774abefa7e1c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquiring lock "778faa4f-4c5f-4ec2-b17b-5d7513c9c218" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 767.428300] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7d7b1dc2-9273-4a08-b3a4-774abefa7e1c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Lock "778faa4f-4c5f-4ec2-b17b-5d7513c9c218" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 767.428300] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7d7b1dc2-9273-4a08-b3a4-774abefa7e1c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquiring lock "778faa4f-4c5f-4ec2-b17b-5d7513c9c218-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 767.428300] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7d7b1dc2-9273-4a08-b3a4-774abefa7e1c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Lock "778faa4f-4c5f-4ec2-b17b-5d7513c9c218-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 767.428300] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7d7b1dc2-9273-4a08-b3a4-774abefa7e1c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Lock 
"778faa4f-4c5f-4ec2-b17b-5d7513c9c218-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 767.429289] env[63345]: INFO nova.compute.manager [None req-7d7b1dc2-9273-4a08-b3a4-774abefa7e1c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 778faa4f-4c5f-4ec2-b17b-5d7513c9c218] Terminating instance [ 767.451385] env[63345]: DEBUG nova.scheduler.client.report [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 767.486727] env[63345]: DEBUG oslo_concurrency.lockutils [None req-2386134b-d3d3-410a-9d58-55819fc91c6b tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Acquiring lock "7bef089c-e93b-4ba6-a683-4e076489f92a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 767.486977] env[63345]: DEBUG oslo_concurrency.lockutils [None req-2386134b-d3d3-410a-9d58-55819fc91c6b tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Lock "7bef089c-e93b-4ba6-a683-4e076489f92a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 767.487306] env[63345]: DEBUG oslo_concurrency.lockutils [None req-2386134b-d3d3-410a-9d58-55819fc91c6b tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Acquiring lock "7bef089c-e93b-4ba6-a683-4e076489f92a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 767.487374] env[63345]: DEBUG oslo_concurrency.lockutils [None req-2386134b-d3d3-410a-9d58-55819fc91c6b tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Lock "7bef089c-e93b-4ba6-a683-4e076489f92a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 767.487605] env[63345]: DEBUG oslo_concurrency.lockutils [None req-2386134b-d3d3-410a-9d58-55819fc91c6b tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Lock "7bef089c-e93b-4ba6-a683-4e076489f92a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 767.489740] env[63345]: INFO nova.compute.manager [None req-2386134b-d3d3-410a-9d58-55819fc91c6b tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] [instance: 7bef089c-e93b-4ba6-a683-4e076489f92a] Terminating instance [ 767.497496] env[63345]: DEBUG nova.compute.manager [req-08ffc0e7-06b0-4ff2-adb0-b21cfdfebfe3 req-82975dc7-2521-48ba-85b9-c8a7f29115e7 service nova] [instance: e3d52cbd-e768-4425-b83e-180a6e58fd00] Received event network-vif-deleted-b14c7ab5-3080-4f38-a677-17206714df35 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 767.497546] env[63345]: INFO nova.compute.manager [req-08ffc0e7-06b0-4ff2-adb0-b21cfdfebfe3 req-82975dc7-2521-48ba-85b9-c8a7f29115e7 service nova] [instance: e3d52cbd-e768-4425-b83e-180a6e58fd00] Neutron deleted interface b14c7ab5-3080-4f38-a677-17206714df35; detaching it from the instance and deleting it from the info cache [ 767.497730] env[63345]: DEBUG nova.network.neutron [req-08ffc0e7-06b0-4ff2-adb0-b21cfdfebfe3 req-82975dc7-2521-48ba-85b9-c8a7f29115e7 service nova] [instance: e3d52cbd-e768-4425-b83e-180a6e58fd00] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 767.502101] env[63345]: DEBUG nova.network.neutron [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 767.508135] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 767.562023] env[63345]: DEBUG oslo_vmware.api [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Task: {'id': task-1017011, 'name': PowerOnVM_Task, 'duration_secs': 0.524297} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.562023] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] [instance: 93112cc1-f9a1-4188-9555-bddf483426a1] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 767.562023] env[63345]: INFO nova.compute.manager [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] [instance: 93112cc1-f9a1-4188-9555-bddf483426a1] Took 13.80 seconds to spawn the instance on the hypervisor. 
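The Rename_Task and PowerOnVM_Task records above show oslo.vmware's task handling: a vSphere task is started through the API session, then wait_for_task() polls it (the "Task: {...} progress is N%" lines) until it reports "completed successfully". A rough sketch against oslo.vmware's public session API; the connection parameters and VM reference are placeholders, not values from this deployment:

    from oslo_vmware import api as vmware_api

    # Placeholder credentials; a real deployment reads these from configuration.
    session = vmware_api.VMwareAPISession(
        'vcenter.example.org', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # Placeholder: a VirtualMachine managed object reference obtained elsewhere,
    # e.g. via a property collector or SearchIndex lookup as in the records above.
    vm_ref = None

    # Start a vSphere task (here: power on a VM) and block until it finishes.
    # wait_for_task() produces the periodic progress records and returns once
    # the task succeeds, raising on error.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task)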
[ 767.562023] env[63345]: DEBUG nova.compute.manager [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] [instance: 93112cc1-f9a1-4188-9555-bddf483426a1] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 767.562023] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96927e6a-54ab-44f8-be61-fdeca61d6f2b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.655526] env[63345]: DEBUG nova.network.neutron [-] [instance: e3d52cbd-e768-4425-b83e-180a6e58fd00] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 767.758997] env[63345]: DEBUG nova.network.neutron [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Updating instance_info_cache with network_info: [{"id": "a44ad561-3547-45fd-a941-c72ff5211989", "address": "fa:16:3e:cf:c7:9e", "network": {"id": "ce89b46a-97ec-4f2d-be39-333e9fcf307d", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-416012078-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2b389a73e7804452b23d8c00bedd0362", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bd3c6b64-aba2-4bdc-a693-3b4dff3ed861", "external-id": "nsx-vlan-transportzone-600", "segmentation_id": 600, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa44ad561-35", "ovs_interfaceid": "a44ad561-3547-45fd-a941-c72ff5211989", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 767.936194] env[63345]: DEBUG nova.compute.manager [None req-7d7b1dc2-9273-4a08-b3a4-774abefa7e1c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 778faa4f-4c5f-4ec2-b17b-5d7513c9c218] Start destroying the instance on the hypervisor. 
{{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 767.937189] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-7d7b1dc2-9273-4a08-b3a4-774abefa7e1c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 778faa4f-4c5f-4ec2-b17b-5d7513c9c218] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 767.937294] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2aa81e43-1611-459f-9b80-e092b1bfc443 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.945462] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d7b1dc2-9273-4a08-b3a4-774abefa7e1c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 778faa4f-4c5f-4ec2-b17b-5d7513c9c218] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 767.945734] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d2acdc62-bf0a-42d6-890a-46fed97d257d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.952117] env[63345]: DEBUG oslo_vmware.api [None req-7d7b1dc2-9273-4a08-b3a4-774abefa7e1c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Waiting for the task: (returnval){ [ 767.952117] env[63345]: value = "task-1017012" [ 767.952117] env[63345]: _type = "Task" [ 767.952117] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.965255] env[63345]: DEBUG oslo_concurrency.lockutils [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.686s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 767.965255] env[63345]: DEBUG nova.compute.manager [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: bcec23fe-75c7-479e-9210-85ca6781d7e5] Start building networks asynchronously for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 767.966576] env[63345]: DEBUG oslo_vmware.api [None req-7d7b1dc2-9273-4a08-b3a4-774abefa7e1c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017012, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.966576] env[63345]: DEBUG oslo_concurrency.lockutils [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.534s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 767.968261] env[63345]: INFO nova.compute.claims [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] [instance: a9b69d13-6330-4f9b-b8e1-1c0017655f9f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 768.004333] env[63345]: DEBUG nova.compute.manager [None req-2386134b-d3d3-410a-9d58-55819fc91c6b tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] [instance: 7bef089c-e93b-4ba6-a683-4e076489f92a] Start destroying the instance on the hypervisor. {{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 768.005016] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-2386134b-d3d3-410a-9d58-55819fc91c6b tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] [instance: 7bef089c-e93b-4ba6-a683-4e076489f92a] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 768.005253] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0f1eaf97-2fd1-4e53-b2d5-a526ec8b0059 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.008103] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-af91f08f-067b-43d2-bbb2-744aca93b94a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.015185] env[63345]: DEBUG oslo_vmware.api [None req-2386134b-d3d3-410a-9d58-55819fc91c6b tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Waiting for the task: (returnval){ [ 768.015185] env[63345]: value = "task-1017013" [ 768.015185] env[63345]: _type = "Task" [ 768.015185] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.022042] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de9390eb-ac4c-4f4f-9ca8-9ef3a89355cc {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.038956] env[63345]: DEBUG oslo_vmware.api [None req-2386134b-d3d3-410a-9d58-55819fc91c6b tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Task: {'id': task-1017013, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.053953] env[63345]: DEBUG nova.compute.manager [req-08ffc0e7-06b0-4ff2-adb0-b21cfdfebfe3 req-82975dc7-2521-48ba-85b9-c8a7f29115e7 service nova] [instance: e3d52cbd-e768-4425-b83e-180a6e58fd00] Detach interface failed, port_id=b14c7ab5-3080-4f38-a677-17206714df35, reason: Instance e3d52cbd-e768-4425-b83e-180a6e58fd00 could not be found. {{(pid=63345) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 768.081181] env[63345]: INFO nova.compute.manager [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] [instance: 93112cc1-f9a1-4188-9555-bddf483426a1] Took 44.39 seconds to build instance. [ 768.111740] env[63345]: DEBUG nova.compute.manager [req-7ed10062-01a2-4b35-a0ce-d5a59058d378 req-e64a4e71-3e36-42bf-972d-7dc5c20c7a13 service nova] [instance: b5173471-3367-42ba-b450-62ad8573f048] Received event network-vif-plugged-9dc2d1aa-5968-48ba-9b48-23f87e1e9419 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 768.111970] env[63345]: DEBUG oslo_concurrency.lockutils [req-7ed10062-01a2-4b35-a0ce-d5a59058d378 req-e64a4e71-3e36-42bf-972d-7dc5c20c7a13 service nova] Acquiring lock "b5173471-3367-42ba-b450-62ad8573f048-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 768.112664] env[63345]: DEBUG oslo_concurrency.lockutils [req-7ed10062-01a2-4b35-a0ce-d5a59058d378 req-e64a4e71-3e36-42bf-972d-7dc5c20c7a13 service nova] Lock "b5173471-3367-42ba-b450-62ad8573f048-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 768.112664] env[63345]: DEBUG oslo_concurrency.lockutils [req-7ed10062-01a2-4b35-a0ce-d5a59058d378 req-e64a4e71-3e36-42bf-972d-7dc5c20c7a13 service nova] Lock "b5173471-3367-42ba-b450-62ad8573f048-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 768.112664] env[63345]: DEBUG nova.compute.manager [req-7ed10062-01a2-4b35-a0ce-d5a59058d378 req-e64a4e71-3e36-42bf-972d-7dc5c20c7a13 service nova] [instance: b5173471-3367-42ba-b450-62ad8573f048] No waiting events found dispatching network-vif-plugged-9dc2d1aa-5968-48ba-9b48-23f87e1e9419 {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 768.112773] env[63345]: WARNING nova.compute.manager [req-7ed10062-01a2-4b35-a0ce-d5a59058d378 req-e64a4e71-3e36-42bf-972d-7dc5c20c7a13 service nova] [instance: b5173471-3367-42ba-b450-62ad8573f048] Received unexpected event network-vif-plugged-9dc2d1aa-5968-48ba-9b48-23f87e1e9419 for instance with vm_state building and task_state spawning. [ 768.158296] env[63345]: INFO nova.compute.manager [-] [instance: e3d52cbd-e768-4425-b83e-180a6e58fd00] Took 1.37 seconds to deallocate network for instance. 
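The earlier "Waiting for function ... _deallocate_network_with_retries to return" record and the "Took 1.37 seconds to deallocate network" line just above come from Nova retrying network deallocation under oslo.service's looping-call helper. A small self-contained sketch of the fixed-interval variant of that helper; the retry body is a stand-in, not Nova's _deallocate_network_with_retries:

    from oslo_service import loopingcall

    attempts = {'count': 0}

    def _try_deallocate():
        # Stand-in for the real deallocation call; stop the loop once it succeeds.
        attempts['count'] += 1
        if attempts['count'] >= 3:  # pretend the third try succeeds
            raise loopingcall.LoopingCallDone(retvalue=True)

    # FixedIntervalLoopingCall re-invokes the function every `interval` seconds
    # until it raises LoopingCallDone; wait() blocks the caller until then.
    timer = loopingcall.FixedIntervalLoopingCall(_try_deallocate)
    result = timer.start(interval=0.1).wait()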
[ 768.172206] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquiring lock "11652422-9136-4453-b932-06695f9bc910" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 768.172519] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Lock "11652422-9136-4453-b932-06695f9bc910" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 768.222839] env[63345]: DEBUG nova.network.neutron [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: b5173471-3367-42ba-b450-62ad8573f048] Successfully updated port: 9dc2d1aa-5968-48ba-9b48-23f87e1e9419 {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 768.261300] env[63345]: DEBUG oslo_concurrency.lockutils [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Releasing lock "refresh_cache-3a85df04-3997-48a3-8992-f24fe997b3cc" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 768.262218] env[63345]: DEBUG nova.compute.manager [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Instance network_info: |[{"id": "a44ad561-3547-45fd-a941-c72ff5211989", "address": "fa:16:3e:cf:c7:9e", "network": {"id": "ce89b46a-97ec-4f2d-be39-333e9fcf307d", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-416012078-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2b389a73e7804452b23d8c00bedd0362", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bd3c6b64-aba2-4bdc-a693-3b4dff3ed861", "external-id": "nsx-vlan-transportzone-600", "segmentation_id": 600, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa44ad561-35", "ovs_interfaceid": "a44ad561-3547-45fd-a941-c72ff5211989", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 768.262732] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 
'fa:16:3e:cf:c7:9e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bd3c6b64-aba2-4bdc-a693-3b4dff3ed861', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a44ad561-3547-45fd-a941-c72ff5211989', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 768.272115] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Creating folder: Project (2b389a73e7804452b23d8c00bedd0362). Parent ref: group-v225918. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 768.272771] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dbffd8f6-b6bb-45ef-97b8-7809333d6307 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.283181] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Created folder: Project (2b389a73e7804452b23d8c00bedd0362) in parent group-v225918. [ 768.283386] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Creating folder: Instances. Parent ref: group-v226024. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 768.283628] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fb290915-6b5b-485b-a93a-e6041a0049a5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.292265] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Created folder: Instances in parent group-v226024. [ 768.292265] env[63345]: DEBUG oslo.service.loopingcall [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 768.292265] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 768.292446] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3ce70e93-7509-4b52-a140-d245b14a41e8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.311880] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 768.311880] env[63345]: value = "task-1017016" [ 768.311880] env[63345]: _type = "Task" [ 768.311880] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.324640] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017016, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.464109] env[63345]: DEBUG oslo_vmware.api [None req-7d7b1dc2-9273-4a08-b3a4-774abefa7e1c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017012, 'name': PowerOffVM_Task, 'duration_secs': 0.23646} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.464443] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d7b1dc2-9273-4a08-b3a4-774abefa7e1c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 778faa4f-4c5f-4ec2-b17b-5d7513c9c218] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 768.464653] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-7d7b1dc2-9273-4a08-b3a4-774abefa7e1c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 778faa4f-4c5f-4ec2-b17b-5d7513c9c218] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 768.464927] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7eda2e78-5fd4-4538-a73d-a34100ddf8c9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.469086] env[63345]: DEBUG nova.compute.utils [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 768.472093] env[63345]: DEBUG nova.compute.manager [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: bcec23fe-75c7-479e-9210-85ca6781d7e5] Allocating IP information in the background. 
{{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 768.472431] env[63345]: DEBUG nova.network.neutron [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: bcec23fe-75c7-479e-9210-85ca6781d7e5] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 768.528115] env[63345]: DEBUG nova.policy [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c62ea716472f45249517ae2b1318607c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ac5c2a653dae436c97514507939c4e3c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 768.536668] env[63345]: DEBUG oslo_vmware.api [None req-2386134b-d3d3-410a-9d58-55819fc91c6b tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Task: {'id': task-1017013, 'name': PowerOffVM_Task, 'duration_secs': 0.191611} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.536882] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-2386134b-d3d3-410a-9d58-55819fc91c6b tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] [instance: 7bef089c-e93b-4ba6-a683-4e076489f92a] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 768.537083] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-2386134b-d3d3-410a-9d58-55819fc91c6b tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] [instance: 7bef089c-e93b-4ba6-a683-4e076489f92a] Volume detach. 
Driver type: vmdk {{(pid=63345) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 768.537264] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-2386134b-d3d3-410a-9d58-55819fc91c6b tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] [instance: 7bef089c-e93b-4ba6-a683-4e076489f92a] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-225930', 'volume_id': 'a6bb0405-c6c3-4109-b8e1-29b372ad3058', 'name': 'volume-a6bb0405-c6c3-4109-b8e1-29b372ad3058', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '7bef089c-e93b-4ba6-a683-4e076489f92a', 'attached_at': '', 'detached_at': '', 'volume_id': 'a6bb0405-c6c3-4109-b8e1-29b372ad3058', 'serial': 'a6bb0405-c6c3-4109-b8e1-29b372ad3058'} {{(pid=63345) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 768.538113] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cda2556d-d796-4ad7-8a22-85e81adb5a3c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.562197] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84709ec1-6062-4212-960f-da5d4de3639d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.565192] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-7d7b1dc2-9273-4a08-b3a4-774abefa7e1c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 778faa4f-4c5f-4ec2-b17b-5d7513c9c218] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 768.565418] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-7d7b1dc2-9273-4a08-b3a4-774abefa7e1c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 778faa4f-4c5f-4ec2-b17b-5d7513c9c218] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 768.565603] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d7b1dc2-9273-4a08-b3a4-774abefa7e1c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Deleting the datastore file [datastore2] 778faa4f-4c5f-4ec2-b17b-5d7513c9c218 {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 768.566276] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d4f2796d-e8c4-4252-9da9-3bf57833a1ae {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.573662] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d532cc94-9a8c-4083-afe7-bf678c8cc3c7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.576764] env[63345]: DEBUG oslo_vmware.api [None req-7d7b1dc2-9273-4a08-b3a4-774abefa7e1c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Waiting for the task: (returnval){ [ 768.576764] env[63345]: value = "task-1017018" [ 768.576764] env[63345]: _type = "Task" [ 768.576764] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.598636] env[63345]: DEBUG oslo_concurrency.lockutils [None req-62df24b1-9d69-4471-a53f-f3e88ae74b3f tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Lock "93112cc1-f9a1-4188-9555-bddf483426a1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 126.203s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 768.602938] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8229419f-54a8-45b9-96c6-8d780bcde7ba {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.613555] env[63345]: DEBUG oslo_vmware.api [None req-7d7b1dc2-9273-4a08-b3a4-774abefa7e1c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017018, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.631338] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-2386134b-d3d3-410a-9d58-55819fc91c6b tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] The volume has not been displaced from its original location: [datastore1] volume-a6bb0405-c6c3-4109-b8e1-29b372ad3058/volume-a6bb0405-c6c3-4109-b8e1-29b372ad3058.vmdk. No consolidation needed. {{(pid=63345) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 768.638039] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-2386134b-d3d3-410a-9d58-55819fc91c6b tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] [instance: 7bef089c-e93b-4ba6-a683-4e076489f92a] Reconfiguring VM instance instance-00000025 to detach disk 2000 {{(pid=63345) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 768.638617] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-01dec631-94ab-4c75-bcc1-0c3293dd3dec {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.660713] env[63345]: DEBUG oslo_vmware.api [None req-2386134b-d3d3-410a-9d58-55819fc91c6b tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Waiting for the task: (returnval){ [ 768.660713] env[63345]: value = "task-1017019" [ 768.660713] env[63345]: _type = "Task" [ 768.660713] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.671308] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6ec4ae4a-1862-4345-933b-1093499b3269 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 768.671695] env[63345]: DEBUG oslo_vmware.api [None req-2386134b-d3d3-410a-9d58-55819fc91c6b tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Task: {'id': task-1017019, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.729108] env[63345]: DEBUG oslo_concurrency.lockutils [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Acquiring lock "refresh_cache-b5173471-3367-42ba-b450-62ad8573f048" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 768.729108] env[63345]: DEBUG oslo_concurrency.lockutils [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Acquired lock "refresh_cache-b5173471-3367-42ba-b450-62ad8573f048" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 768.729108] env[63345]: DEBUG nova.network.neutron [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: b5173471-3367-42ba-b450-62ad8573f048] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 768.826130] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017016, 'name': CreateVM_Task, 'duration_secs': 0.367447} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.826130] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 768.826130] env[63345]: DEBUG oslo_concurrency.lockutils [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 768.826130] env[63345]: DEBUG oslo_concurrency.lockutils [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 768.826130] env[63345]: DEBUG oslo_concurrency.lockutils [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 768.826130] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-386b4b29-18ad-4c73-b529-46ab6cb96830 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.830948] env[63345]: DEBUG oslo_vmware.api [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Waiting for the task: (returnval){ [ 768.830948] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52f2b4a7-78c9-f8c6-6fb3-fbdc967a2dc4" [ 768.830948] env[63345]: _type = "Task" [ 
768.830948] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.839428] env[63345]: DEBUG oslo_vmware.api [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52f2b4a7-78c9-f8c6-6fb3-fbdc967a2dc4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.852989] env[63345]: DEBUG nova.network.neutron [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: bcec23fe-75c7-479e-9210-85ca6781d7e5] Successfully created port: 34e64073-eaa9-42e5-b620-b216a15b02d1 {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 768.982448] env[63345]: DEBUG nova.compute.manager [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: bcec23fe-75c7-479e-9210-85ca6781d7e5] Start building block device mappings for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 769.034569] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Acquiring lock "9aa651b8-317d-4153-8c33-9df0a5d16115" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 769.034808] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Lock "9aa651b8-317d-4153-8c33-9df0a5d16115" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 769.091203] env[63345]: DEBUG oslo_vmware.api [None req-7d7b1dc2-9273-4a08-b3a4-774abefa7e1c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017018, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.162274} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.091475] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d7b1dc2-9273-4a08-b3a4-774abefa7e1c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 769.091703] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-7d7b1dc2-9273-4a08-b3a4-774abefa7e1c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 778faa4f-4c5f-4ec2-b17b-5d7513c9c218] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 769.091931] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-7d7b1dc2-9273-4a08-b3a4-774abefa7e1c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 778faa4f-4c5f-4ec2-b17b-5d7513c9c218] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 769.092086] env[63345]: INFO nova.compute.manager [None req-7d7b1dc2-9273-4a08-b3a4-774abefa7e1c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 778faa4f-4c5f-4ec2-b17b-5d7513c9c218] Took 1.16 seconds to destroy the instance on the hypervisor. [ 769.092337] env[63345]: DEBUG oslo.service.loopingcall [None req-7d7b1dc2-9273-4a08-b3a4-774abefa7e1c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 769.092533] env[63345]: DEBUG nova.compute.manager [-] [instance: 778faa4f-4c5f-4ec2-b17b-5d7513c9c218] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 769.092629] env[63345]: DEBUG nova.network.neutron [-] [instance: 778faa4f-4c5f-4ec2-b17b-5d7513c9c218] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 769.109423] env[63345]: DEBUG nova.compute.manager [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 11652422-9136-4453-b932-06695f9bc910] Starting instance... 
{{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 769.114447] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f59e42eb-b249-4086-aed8-19c9a3367f3e tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Acquiring lock "93112cc1-f9a1-4188-9555-bddf483426a1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 769.114691] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f59e42eb-b249-4086-aed8-19c9a3367f3e tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Lock "93112cc1-f9a1-4188-9555-bddf483426a1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 769.114892] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f59e42eb-b249-4086-aed8-19c9a3367f3e tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Acquiring lock "93112cc1-f9a1-4188-9555-bddf483426a1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 769.115095] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f59e42eb-b249-4086-aed8-19c9a3367f3e tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Lock "93112cc1-f9a1-4188-9555-bddf483426a1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 769.115271] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f59e42eb-b249-4086-aed8-19c9a3367f3e tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Lock "93112cc1-f9a1-4188-9555-bddf483426a1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 769.118016] env[63345]: INFO nova.compute.manager [None req-f59e42eb-b249-4086-aed8-19c9a3367f3e tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] [instance: 93112cc1-f9a1-4188-9555-bddf483426a1] Terminating instance [ 769.175999] env[63345]: DEBUG oslo_vmware.api [None req-2386134b-d3d3-410a-9d58-55819fc91c6b tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Task: {'id': task-1017019, 'name': ReconfigVM_Task, 'duration_secs': 0.223628} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.178596] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-2386134b-d3d3-410a-9d58-55819fc91c6b tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] [instance: 7bef089c-e93b-4ba6-a683-4e076489f92a] Reconfigured VM instance instance-00000025 to detach disk 2000 {{(pid=63345) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 769.183779] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f444dcb0-167b-43a9-8752-0900b374356c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.200090] env[63345]: DEBUG oslo_vmware.api [None req-2386134b-d3d3-410a-9d58-55819fc91c6b tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Waiting for the task: (returnval){ [ 769.200090] env[63345]: value = "task-1017020" [ 769.200090] env[63345]: _type = "Task" [ 769.200090] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.210311] env[63345]: DEBUG oslo_vmware.api [None req-2386134b-d3d3-410a-9d58-55819fc91c6b tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Task: {'id': task-1017020, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.295666] env[63345]: DEBUG nova.network.neutron [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: b5173471-3367-42ba-b450-62ad8573f048] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 769.342898] env[63345]: DEBUG oslo_vmware.api [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52f2b4a7-78c9-f8c6-6fb3-fbdc967a2dc4, 'name': SearchDatastore_Task, 'duration_secs': 0.012692} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.345468] env[63345]: DEBUG oslo_concurrency.lockutils [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 769.345468] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 769.345468] env[63345]: DEBUG oslo_concurrency.lockutils [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 769.345468] env[63345]: DEBUG oslo_concurrency.lockutils [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 769.345618] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 769.346285] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-37fd8477-8b7b-4e6a-a6d0-8ac0c2204173 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.360840] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 769.361168] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 769.361942] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b8faa8a0-b8d5-440f-8134-8ce655f272db {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.369740] env[63345]: DEBUG oslo_vmware.api [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Waiting for the task: (returnval){ [ 769.369740] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]527bcba3-adf2-ff01-a420-66ad8cb5364f" [ 769.369740] env[63345]: _type = "Task" [ 769.369740] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.379681] env[63345]: DEBUG oslo_vmware.api [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]527bcba3-adf2-ff01-a420-66ad8cb5364f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.464640] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2b0dccf-9475-4343-a18b-1d7c9d444c23 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.473873] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ebc7a8b-9771-412c-b50b-01fcf2545f9c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.512640] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5100f07c-69a2-437e-888d-d5fab2e8603a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.520374] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-786c161d-afc8-468e-aaaf-ce32885730bb {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.533851] env[63345]: DEBUG nova.compute.provider_tree [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 769.549030] env[63345]: DEBUG nova.network.neutron [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: b5173471-3367-42ba-b450-62ad8573f048] Updating instance_info_cache with network_info: [{"id": "9dc2d1aa-5968-48ba-9b48-23f87e1e9419", "address": "fa:16:3e:49:8c:71", "network": {"id": "ce89b46a-97ec-4f2d-be39-333e9fcf307d", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-416012078-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": 
[]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2b389a73e7804452b23d8c00bedd0362", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bd3c6b64-aba2-4bdc-a693-3b4dff3ed861", "external-id": "nsx-vlan-transportzone-600", "segmentation_id": 600, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9dc2d1aa-59", "ovs_interfaceid": "9dc2d1aa-5968-48ba-9b48-23f87e1e9419", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 769.623272] env[63345]: DEBUG nova.compute.manager [None req-f59e42eb-b249-4086-aed8-19c9a3367f3e tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] [instance: 93112cc1-f9a1-4188-9555-bddf483426a1] Start destroying the instance on the hypervisor. {{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 769.627132] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-f59e42eb-b249-4086-aed8-19c9a3367f3e tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] [instance: 93112cc1-f9a1-4188-9555-bddf483426a1] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 769.627132] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf6325b5-b58c-4a0c-bfbb-6b1ec2d70fea {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.635495] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-f59e42eb-b249-4086-aed8-19c9a3367f3e tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] [instance: 93112cc1-f9a1-4188-9555-bddf483426a1] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 769.636466] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 769.636967] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cd3623d4-a426-4105-9288-3b7c21c29883 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.643198] env[63345]: DEBUG oslo_vmware.api [None req-f59e42eb-b249-4086-aed8-19c9a3367f3e tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Waiting for the task: (returnval){ [ 769.643198] env[63345]: value = "task-1017021" [ 769.643198] env[63345]: _type = "Task" [ 769.643198] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.651776] env[63345]: DEBUG oslo_vmware.api [None req-f59e42eb-b249-4086-aed8-19c9a3367f3e tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Task: {'id': task-1017021, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.685741] env[63345]: DEBUG nova.compute.manager [req-4cde3436-61f1-40a3-a67f-19f56de65af0 req-66e22641-2da8-40e1-97b3-110c2fe327ec service nova] [instance: 778faa4f-4c5f-4ec2-b17b-5d7513c9c218] Received event network-vif-deleted-cdecc5d9-4e4d-421b-80ac-a8ae91e31e7e {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 769.686017] env[63345]: INFO nova.compute.manager [req-4cde3436-61f1-40a3-a67f-19f56de65af0 req-66e22641-2da8-40e1-97b3-110c2fe327ec service nova] [instance: 778faa4f-4c5f-4ec2-b17b-5d7513c9c218] Neutron deleted interface cdecc5d9-4e4d-421b-80ac-a8ae91e31e7e; detaching it from the instance and deleting it from the info cache [ 769.686303] env[63345]: DEBUG nova.network.neutron [req-4cde3436-61f1-40a3-a67f-19f56de65af0 req-66e22641-2da8-40e1-97b3-110c2fe327ec service nova] [instance: 778faa4f-4c5f-4ec2-b17b-5d7513c9c218] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 769.710586] env[63345]: DEBUG oslo_vmware.api [None req-2386134b-d3d3-410a-9d58-55819fc91c6b tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Task: {'id': task-1017020, 'name': ReconfigVM_Task, 'duration_secs': 0.23089} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.711797] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-2386134b-d3d3-410a-9d58-55819fc91c6b tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] [instance: 7bef089c-e93b-4ba6-a683-4e076489f92a] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-225930', 'volume_id': 'a6bb0405-c6c3-4109-b8e1-29b372ad3058', 'name': 'volume-a6bb0405-c6c3-4109-b8e1-29b372ad3058', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '7bef089c-e93b-4ba6-a683-4e076489f92a', 'attached_at': '', 'detached_at': '', 'volume_id': 'a6bb0405-c6c3-4109-b8e1-29b372ad3058', 'serial': 'a6bb0405-c6c3-4109-b8e1-29b372ad3058'} {{(pid=63345) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 769.711983] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-2386134b-d3d3-410a-9d58-55819fc91c6b tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] [instance: 7bef089c-e93b-4ba6-a683-4e076489f92a] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 769.712829] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f46c996-077f-4564-aaef-8ba7fb78c9e8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.722809] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-2386134b-d3d3-410a-9d58-55819fc91c6b tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] [instance: 7bef089c-e93b-4ba6-a683-4e076489f92a] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 769.723151] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ac12568e-2eb2-4b44-9d4e-52d0c8695407 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.798081] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-2386134b-d3d3-410a-9d58-55819fc91c6b tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] [instance: 7bef089c-e93b-4ba6-a683-4e076489f92a] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 769.798493] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-2386134b-d3d3-410a-9d58-55819fc91c6b tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] [instance: 7bef089c-e93b-4ba6-a683-4e076489f92a] Deleting contents of the VM from datastore datastore1 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 769.798556] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-2386134b-d3d3-410a-9d58-55819fc91c6b tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Deleting the datastore file [datastore1] 7bef089c-e93b-4ba6-a683-4e076489f92a {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 769.798808] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e273dec7-a19b-4d5e-8df1-8af5205ac52e {{(pid=63345) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.805291] env[63345]: DEBUG oslo_vmware.api [None req-2386134b-d3d3-410a-9d58-55819fc91c6b tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Waiting for the task: (returnval){ [ 769.805291] env[63345]: value = "task-1017023" [ 769.805291] env[63345]: _type = "Task" [ 769.805291] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.813345] env[63345]: DEBUG oslo_vmware.api [None req-2386134b-d3d3-410a-9d58-55819fc91c6b tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Task: {'id': task-1017023, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.883177] env[63345]: DEBUG oslo_vmware.api [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]527bcba3-adf2-ff01-a420-66ad8cb5364f, 'name': SearchDatastore_Task, 'duration_secs': 0.035692} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.884181] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3a8acf2b-7731-4094-a581-c24eaf3a0094 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.889765] env[63345]: DEBUG oslo_vmware.api [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Waiting for the task: (returnval){ [ 769.889765] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52f0f437-2a50-d7e8-ca6d-60033fafc77f" [ 769.889765] env[63345]: _type = "Task" [ 769.889765] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.898068] env[63345]: DEBUG oslo_vmware.api [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52f0f437-2a50-d7e8-ca6d-60033fafc77f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.012928] env[63345]: DEBUG nova.compute.manager [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: bcec23fe-75c7-479e-9210-85ca6781d7e5] Start spawning the instance on the hypervisor. 
{{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 770.039784] env[63345]: DEBUG nova.scheduler.client.report [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 770.045183] env[63345]: DEBUG nova.virt.hardware [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 770.045438] env[63345]: DEBUG nova.virt.hardware [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 770.045605] env[63345]: DEBUG nova.virt.hardware [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 770.045795] env[63345]: DEBUG nova.virt.hardware [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 770.045948] env[63345]: DEBUG nova.virt.hardware [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 770.046156] env[63345]: DEBUG nova.virt.hardware [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 770.046377] 
env[63345]: DEBUG nova.virt.hardware [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 770.046544] env[63345]: DEBUG nova.virt.hardware [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 770.046716] env[63345]: DEBUG nova.virt.hardware [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 770.046881] env[63345]: DEBUG nova.virt.hardware [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 770.047072] env[63345]: DEBUG nova.virt.hardware [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 770.047949] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21f7448c-db8b-4c3c-b559-a1dd6645cfee {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.052811] env[63345]: DEBUG oslo_concurrency.lockutils [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Releasing lock "refresh_cache-b5173471-3367-42ba-b450-62ad8573f048" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 770.053116] env[63345]: DEBUG nova.compute.manager [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: b5173471-3367-42ba-b450-62ad8573f048] Instance network_info: |[{"id": "9dc2d1aa-5968-48ba-9b48-23f87e1e9419", "address": "fa:16:3e:49:8c:71", "network": {"id": "ce89b46a-97ec-4f2d-be39-333e9fcf307d", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-416012078-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2b389a73e7804452b23d8c00bedd0362", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bd3c6b64-aba2-4bdc-a693-3b4dff3ed861", "external-id": "nsx-vlan-transportzone-600", "segmentation_id": 
600, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9dc2d1aa-59", "ovs_interfaceid": "9dc2d1aa-5968-48ba-9b48-23f87e1e9419", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 770.055555] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: b5173471-3367-42ba-b450-62ad8573f048] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:49:8c:71', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bd3c6b64-aba2-4bdc-a693-3b4dff3ed861', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9dc2d1aa-5968-48ba-9b48-23f87e1e9419', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 770.063435] env[63345]: DEBUG oslo.service.loopingcall [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 770.067394] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b5173471-3367-42ba-b450-62ad8573f048] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 770.069064] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f366dd8-3ff2-4147-815a-e5322807106b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.075087] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1d849e34-4e25-4bd7-9955-b9aae82219a8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.103438] env[63345]: DEBUG nova.network.neutron [-] [instance: 778faa4f-4c5f-4ec2-b17b-5d7513c9c218] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 770.106845] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 770.106845] env[63345]: value = "task-1017024" [ 770.106845] env[63345]: _type = "Task" [ 770.106845] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.114548] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017024, 'name': CreateVM_Task} progress is 6%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.155009] env[63345]: DEBUG oslo_vmware.api [None req-f59e42eb-b249-4086-aed8-19c9a3367f3e tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Task: {'id': task-1017021, 'name': PowerOffVM_Task, 'duration_secs': 0.230078} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.155306] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-f59e42eb-b249-4086-aed8-19c9a3367f3e tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] [instance: 93112cc1-f9a1-4188-9555-bddf483426a1] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 770.155482] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-f59e42eb-b249-4086-aed8-19c9a3367f3e tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] [instance: 93112cc1-f9a1-4188-9555-bddf483426a1] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 770.155878] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3d04dce6-1845-4d46-a7b1-c22f2f3cc458 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.188947] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-794b5e28-98a7-42a9-8d64-ed09ebdd600a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.198120] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80a43b20-962c-4b70-a503-9e457adb2774 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.211435] env[63345]: DEBUG nova.compute.manager [req-bc8e03e5-1696-4f96-823c-52a8f4228a5d req-9e895be4-7fe4-4370-815c-0aa97a893c24 service nova] [instance: b5173471-3367-42ba-b450-62ad8573f048] Received event network-changed-9dc2d1aa-5968-48ba-9b48-23f87e1e9419 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 770.211998] env[63345]: DEBUG nova.compute.manager [req-bc8e03e5-1696-4f96-823c-52a8f4228a5d req-9e895be4-7fe4-4370-815c-0aa97a893c24 service nova] [instance: b5173471-3367-42ba-b450-62ad8573f048] Refreshing instance network info cache due to event network-changed-9dc2d1aa-5968-48ba-9b48-23f87e1e9419. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 770.212583] env[63345]: DEBUG oslo_concurrency.lockutils [req-bc8e03e5-1696-4f96-823c-52a8f4228a5d req-9e895be4-7fe4-4370-815c-0aa97a893c24 service nova] Acquiring lock "refresh_cache-b5173471-3367-42ba-b450-62ad8573f048" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 770.212583] env[63345]: DEBUG oslo_concurrency.lockutils [req-bc8e03e5-1696-4f96-823c-52a8f4228a5d req-9e895be4-7fe4-4370-815c-0aa97a893c24 service nova] Acquired lock "refresh_cache-b5173471-3367-42ba-b450-62ad8573f048" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 770.212708] env[63345]: DEBUG nova.network.neutron [req-bc8e03e5-1696-4f96-823c-52a8f4228a5d req-9e895be4-7fe4-4370-815c-0aa97a893c24 service nova] [instance: b5173471-3367-42ba-b450-62ad8573f048] Refreshing network info cache for port 9dc2d1aa-5968-48ba-9b48-23f87e1e9419 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 770.235077] env[63345]: DEBUG nova.compute.manager [req-4cde3436-61f1-40a3-a67f-19f56de65af0 req-66e22641-2da8-40e1-97b3-110c2fe327ec service nova] [instance: 778faa4f-4c5f-4ec2-b17b-5d7513c9c218] Detach interface failed, port_id=cdecc5d9-4e4d-421b-80ac-a8ae91e31e7e, reason: Instance 778faa4f-4c5f-4ec2-b17b-5d7513c9c218 could not be found. {{(pid=63345) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 770.267382] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-f59e42eb-b249-4086-aed8-19c9a3367f3e tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] [instance: 93112cc1-f9a1-4188-9555-bddf483426a1] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 770.267614] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-f59e42eb-b249-4086-aed8-19c9a3367f3e tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] [instance: 93112cc1-f9a1-4188-9555-bddf483426a1] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 770.267845] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-f59e42eb-b249-4086-aed8-19c9a3367f3e tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Deleting the datastore file [datastore2] 93112cc1-f9a1-4188-9555-bddf483426a1 {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 770.268116] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-30d7a621-fbdd-498a-961b-dc039279dfcc {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.274041] env[63345]: DEBUG oslo_vmware.api [None req-f59e42eb-b249-4086-aed8-19c9a3367f3e tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Waiting for the task: (returnval){ [ 770.274041] env[63345]: value = "task-1017026" [ 770.274041] env[63345]: _type = "Task" [ 770.274041] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.282196] env[63345]: DEBUG oslo_vmware.api [None req-f59e42eb-b249-4086-aed8-19c9a3367f3e tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Task: {'id': task-1017026, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.314869] env[63345]: DEBUG oslo_vmware.api [None req-2386134b-d3d3-410a-9d58-55819fc91c6b tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Task: {'id': task-1017023, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.123125} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.314869] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-2386134b-d3d3-410a-9d58-55819fc91c6b tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 770.314952] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-2386134b-d3d3-410a-9d58-55819fc91c6b tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] [instance: 7bef089c-e93b-4ba6-a683-4e076489f92a] Deleted contents of the VM from datastore datastore1 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 770.315162] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-2386134b-d3d3-410a-9d58-55819fc91c6b tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] [instance: 7bef089c-e93b-4ba6-a683-4e076489f92a] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 770.315361] env[63345]: INFO nova.compute.manager [None req-2386134b-d3d3-410a-9d58-55819fc91c6b tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] [instance: 7bef089c-e93b-4ba6-a683-4e076489f92a] Took 2.31 seconds to destroy the instance on the hypervisor. [ 770.315626] env[63345]: DEBUG oslo.service.loopingcall [None req-2386134b-d3d3-410a-9d58-55819fc91c6b tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 770.315827] env[63345]: DEBUG nova.compute.manager [-] [instance: 7bef089c-e93b-4ba6-a683-4e076489f92a] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 770.315922] env[63345]: DEBUG nova.network.neutron [-] [instance: 7bef089c-e93b-4ba6-a683-4e076489f92a] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 770.400924] env[63345]: DEBUG oslo_vmware.api [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52f0f437-2a50-d7e8-ca6d-60033fafc77f, 'name': SearchDatastore_Task, 'duration_secs': 0.010494} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.401944] env[63345]: DEBUG nova.network.neutron [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: bcec23fe-75c7-479e-9210-85ca6781d7e5] Successfully updated port: 34e64073-eaa9-42e5-b620-b216a15b02d1 {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 770.403155] env[63345]: DEBUG oslo_concurrency.lockutils [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 770.403442] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore1] 3a85df04-3997-48a3-8992-f24fe997b3cc/3a85df04-3997-48a3-8992-f24fe997b3cc.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 770.403955] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-62324630-9ecc-414f-8b89-5df88670a88e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.411371] env[63345]: DEBUG oslo_vmware.api [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Waiting for the task: (returnval){ [ 770.411371] env[63345]: value = "task-1017027" [ 770.411371] env[63345]: _type = "Task" [ 770.411371] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.421138] env[63345]: DEBUG oslo_vmware.api [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017027, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.552464] env[63345]: DEBUG oslo_concurrency.lockutils [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.586s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 770.553079] env[63345]: DEBUG nova.compute.manager [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] [instance: a9b69d13-6330-4f9b-b8e1-1c0017655f9f] Start building networks asynchronously for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 770.556073] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.922s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 770.557493] env[63345]: INFO nova.compute.claims [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] [instance: 02eb493e-d1a1-4461-8e3f-e493e96fe058] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 770.607607] env[63345]: INFO nova.compute.manager [-] [instance: 778faa4f-4c5f-4ec2-b17b-5d7513c9c218] Took 1.51 seconds to deallocate network for instance. [ 770.623934] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017024, 'name': CreateVM_Task, 'duration_secs': 0.393056} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.623934] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b5173471-3367-42ba-b450-62ad8573f048] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 770.624430] env[63345]: DEBUG oslo_concurrency.lockutils [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 770.624536] env[63345]: DEBUG oslo_concurrency.lockutils [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 770.624852] env[63345]: DEBUG oslo_concurrency.lockutils [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 770.627953] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b9119847-3266-49d8-a7b1-dc7e695eda8b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.631093] env[63345]: DEBUG oslo_vmware.api [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Waiting for the task: (returnval){ [ 770.631093] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]5289edaa-ac69-4882-0005-ccb82e024934" [ 770.631093] env[63345]: _type = "Task" [ 770.631093] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.643950] env[63345]: DEBUG oslo_vmware.api [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5289edaa-ac69-4882-0005-ccb82e024934, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.789998] env[63345]: DEBUG oslo_vmware.api [None req-f59e42eb-b249-4086-aed8-19c9a3367f3e tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Task: {'id': task-1017026, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.175245} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.789998] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-f59e42eb-b249-4086-aed8-19c9a3367f3e tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 770.789998] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-f59e42eb-b249-4086-aed8-19c9a3367f3e tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] [instance: 93112cc1-f9a1-4188-9555-bddf483426a1] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 770.790207] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-f59e42eb-b249-4086-aed8-19c9a3367f3e tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] [instance: 93112cc1-f9a1-4188-9555-bddf483426a1] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 770.790961] env[63345]: INFO nova.compute.manager [None req-f59e42eb-b249-4086-aed8-19c9a3367f3e tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] [instance: 93112cc1-f9a1-4188-9555-bddf483426a1] Took 1.17 seconds to destroy the instance on the hypervisor. [ 770.790961] env[63345]: DEBUG oslo.service.loopingcall [None req-f59e42eb-b249-4086-aed8-19c9a3367f3e tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 770.790961] env[63345]: DEBUG nova.compute.manager [-] [instance: 93112cc1-f9a1-4188-9555-bddf483426a1] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 770.791214] env[63345]: DEBUG nova.network.neutron [-] [instance: 93112cc1-f9a1-4188-9555-bddf483426a1] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 770.909445] env[63345]: DEBUG oslo_concurrency.lockutils [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Acquiring lock "refresh_cache-bcec23fe-75c7-479e-9210-85ca6781d7e5" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 770.910334] env[63345]: DEBUG oslo_concurrency.lockutils [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Acquired lock "refresh_cache-bcec23fe-75c7-479e-9210-85ca6781d7e5" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 770.910673] env[63345]: DEBUG nova.network.neutron [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: bcec23fe-75c7-479e-9210-85ca6781d7e5] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 770.931170] env[63345]: DEBUG oslo_vmware.api [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017027, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.963059] env[63345]: DEBUG nova.network.neutron [req-bc8e03e5-1696-4f96-823c-52a8f4228a5d req-9e895be4-7fe4-4370-815c-0aa97a893c24 service nova] [instance: b5173471-3367-42ba-b450-62ad8573f048] Updated VIF entry in instance network info cache for port 9dc2d1aa-5968-48ba-9b48-23f87e1e9419. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 770.963466] env[63345]: DEBUG nova.network.neutron [req-bc8e03e5-1696-4f96-823c-52a8f4228a5d req-9e895be4-7fe4-4370-815c-0aa97a893c24 service nova] [instance: b5173471-3367-42ba-b450-62ad8573f048] Updating instance_info_cache with network_info: [{"id": "9dc2d1aa-5968-48ba-9b48-23f87e1e9419", "address": "fa:16:3e:49:8c:71", "network": {"id": "ce89b46a-97ec-4f2d-be39-333e9fcf307d", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-416012078-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2b389a73e7804452b23d8c00bedd0362", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bd3c6b64-aba2-4bdc-a693-3b4dff3ed861", "external-id": "nsx-vlan-transportzone-600", "segmentation_id": 600, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9dc2d1aa-59", "ovs_interfaceid": "9dc2d1aa-5968-48ba-9b48-23f87e1e9419", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 771.066232] env[63345]: DEBUG nova.compute.utils [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 771.068265] env[63345]: DEBUG nova.compute.manager [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] [instance: a9b69d13-6330-4f9b-b8e1-1c0017655f9f] Allocating IP information in the background. 
{{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 771.068593] env[63345]: DEBUG nova.network.neutron [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] [instance: a9b69d13-6330-4f9b-b8e1-1c0017655f9f] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 771.117096] env[63345]: DEBUG nova.policy [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7fd220f5f1f14da5a195b66bd5800cb9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c9a46ce98d4b4b289b27f2f5b9a8a446', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 771.125152] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7d7b1dc2-9273-4a08-b3a4-774abefa7e1c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 771.143489] env[63345]: DEBUG oslo_vmware.api [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5289edaa-ac69-4882-0005-ccb82e024934, 'name': SearchDatastore_Task, 'duration_secs': 0.012768} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.143796] env[63345]: DEBUG oslo_concurrency.lockutils [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 771.144054] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: b5173471-3367-42ba-b450-62ad8573f048] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 771.144572] env[63345]: DEBUG oslo_concurrency.lockutils [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 771.144775] env[63345]: DEBUG oslo_concurrency.lockutils [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 771.144969] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 771.145252] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-376f021b-054f-45b0-8ea7-cc1de9e3424d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.154574] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 771.154774] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 771.155717] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c1e11026-5396-4d8e-942e-c6de64b3ed59 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.160964] env[63345]: DEBUG oslo_vmware.api [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Waiting for the task: (returnval){ [ 771.160964] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]524f025f-e1e4-d2eb-2276-1de911514c8c" [ 771.160964] env[63345]: _type = "Task" [ 771.160964] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.168969] env[63345]: DEBUG oslo_vmware.api [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]524f025f-e1e4-d2eb-2276-1de911514c8c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.408506] env[63345]: DEBUG nova.network.neutron [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] [instance: a9b69d13-6330-4f9b-b8e1-1c0017655f9f] Successfully created port: 57bdfdfd-6642-43d9-a9b0-52fc8ac0b050 {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 771.426547] env[63345]: DEBUG oslo_vmware.api [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017027, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.572387} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.427204] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore1] 3a85df04-3997-48a3-8992-f24fe997b3cc/3a85df04-3997-48a3-8992-f24fe997b3cc.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 771.427602] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 771.427949] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bc930483-8a9d-4ef8-adaf-00fb0aff18e4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.439153] env[63345]: DEBUG oslo_vmware.api [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Waiting for the task: (returnval){ [ 771.439153] env[63345]: value = "task-1017028" [ 771.439153] env[63345]: _type = "Task" [ 771.439153] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.449422] env[63345]: DEBUG oslo_vmware.api [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017028, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.466138] env[63345]: DEBUG oslo_concurrency.lockutils [req-bc8e03e5-1696-4f96-823c-52a8f4228a5d req-9e895be4-7fe4-4370-815c-0aa97a893c24 service nova] Releasing lock "refresh_cache-b5173471-3367-42ba-b450-62ad8573f048" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 771.473652] env[63345]: DEBUG nova.network.neutron [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: bcec23fe-75c7-479e-9210-85ca6781d7e5] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 771.574660] env[63345]: DEBUG nova.compute.manager [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] [instance: a9b69d13-6330-4f9b-b8e1-1c0017655f9f] Start building block device mappings for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 771.677407] env[63345]: DEBUG oslo_vmware.api [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]524f025f-e1e4-d2eb-2276-1de911514c8c, 'name': SearchDatastore_Task, 'duration_secs': 0.00897} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.678265] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-db03afba-8a42-4fa5-b752-0dfbbd8fb112 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.684943] env[63345]: DEBUG oslo_vmware.api [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Waiting for the task: (returnval){ [ 771.684943] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52015be0-6618-04b8-1816-aae2077fb0cd" [ 771.684943] env[63345]: _type = "Task" [ 771.684943] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.703040] env[63345]: DEBUG oslo_vmware.api [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52015be0-6618-04b8-1816-aae2077fb0cd, 'name': SearchDatastore_Task, 'duration_secs': 0.009488} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.704145] env[63345]: DEBUG oslo_concurrency.lockutils [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 771.704559] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] b5173471-3367-42ba-b450-62ad8573f048/b5173471-3367-42ba-b450-62ad8573f048.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 771.707016] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-734b1fdb-bb5a-4d12-a620-9b87b1faf7a6 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.715953] env[63345]: DEBUG oslo_vmware.api [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Waiting for the task: (returnval){ [ 771.715953] env[63345]: value = "task-1017029" [ 771.715953] env[63345]: _type = "Task" [ 771.715953] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.725979] env[63345]: DEBUG oslo_vmware.api [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017029, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.732352] env[63345]: DEBUG nova.compute.manager [req-8bbfd835-dd99-488f-a9e8-65326ea594fc req-6d492a13-8cae-45ee-b75d-31019da7c291 service nova] [instance: 93112cc1-f9a1-4188-9555-bddf483426a1] Received event network-vif-deleted-e5fd2647-d313-4830-9b9d-0722b78abadb {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 771.732352] env[63345]: INFO nova.compute.manager [req-8bbfd835-dd99-488f-a9e8-65326ea594fc req-6d492a13-8cae-45ee-b75d-31019da7c291 service nova] [instance: 93112cc1-f9a1-4188-9555-bddf483426a1] Neutron deleted interface e5fd2647-d313-4830-9b9d-0722b78abadb; detaching it from the instance and deleting it from the info cache [ 771.732352] env[63345]: DEBUG nova.network.neutron [req-8bbfd835-dd99-488f-a9e8-65326ea594fc req-6d492a13-8cae-45ee-b75d-31019da7c291 service nova] [instance: 93112cc1-f9a1-4188-9555-bddf483426a1] Updating instance_info_cache with network_info: [{"id": "b8d03603-203a-4bdb-ac34-d490f6f611b2", "address": "fa:16:3e:a7:2d:46", "network": {"id": "8f47abc5-e642-4441-94a8-79deff38af77", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-595220153", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.36", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4919485c7e184230b38e703f7ce8a047", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b151a0c-aa46-4d21-9ef5-c09cf350b19c", "external-id": "nsx-vlan-transportzone-343", "segmentation_id": 343, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8d03603-20", "ovs_interfaceid": "b8d03603-203a-4bdb-ac34-d490f6f611b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 771.733821] env[63345]: DEBUG nova.network.neutron [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: bcec23fe-75c7-479e-9210-85ca6781d7e5] Updating instance_info_cache with network_info: [{"id": "34e64073-eaa9-42e5-b620-b216a15b02d1", "address": "fa:16:3e:66:81:56", "network": {"id": "403ac06e-e45e-4215-bf0c-16ddd583ddc5", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1349318740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ac5c2a653dae436c97514507939c4e3c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e41c97-4d75-4041-ae71-321e7e9d480b", "external-id": "nsx-vlan-transportzone-483", "segmentation_id": 483, "bound_drivers": {"0": "nsxv3"}}, "devname": 
"tap34e64073-ea", "ovs_interfaceid": "34e64073-eaa9-42e5-b620-b216a15b02d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 771.952372] env[63345]: DEBUG oslo_vmware.api [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017028, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082811} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.952746] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 771.953677] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb620c68-47cb-4377-91fc-73216ed00506 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.973743] env[63345]: DEBUG nova.network.neutron [-] [instance: 7bef089c-e93b-4ba6-a683-4e076489f92a] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 771.983654] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Reconfiguring VM instance instance-00000038 to attach disk [datastore1] 3a85df04-3997-48a3-8992-f24fe997b3cc/3a85df04-3997-48a3-8992-f24fe997b3cc.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 771.986884] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a44b7495-c549-446e-ad27-e731f174ff2a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.009034] env[63345]: DEBUG oslo_vmware.api [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Waiting for the task: (returnval){ [ 772.009034] env[63345]: value = "task-1017030" [ 772.009034] env[63345]: _type = "Task" [ 772.009034] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.020086] env[63345]: DEBUG oslo_vmware.api [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017030, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.071943] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cfa3c71-a8d4-4107-bc5a-3b38c350cd24 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.083570] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b95a023a-c940-4282-baa5-da6ab3170b6a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.117951] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6884374b-bf56-45c5-96c3-d68620a98dee {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.128595] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe356338-f47d-4c35-8b36-ca84e30b7d21 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.142467] env[63345]: DEBUG nova.compute.provider_tree [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 772.194305] env[63345]: DEBUG nova.network.neutron [-] [instance: 93112cc1-f9a1-4188-9555-bddf483426a1] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 772.231023] env[63345]: DEBUG oslo_vmware.api [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017029, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.509393} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.231023] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] b5173471-3367-42ba-b450-62ad8573f048/b5173471-3367-42ba-b450-62ad8573f048.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 772.231023] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: b5173471-3367-42ba-b450-62ad8573f048] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 772.231023] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d28a5489-ef0b-4603-9427-d8c1cf7e7b61 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.237936] env[63345]: DEBUG oslo_concurrency.lockutils [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Releasing lock "refresh_cache-bcec23fe-75c7-479e-9210-85ca6781d7e5" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 772.238299] env[63345]: DEBUG nova.compute.manager [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: bcec23fe-75c7-479e-9210-85ca6781d7e5] Instance network_info: |[{"id": "34e64073-eaa9-42e5-b620-b216a15b02d1", "address": "fa:16:3e:66:81:56", "network": {"id": "403ac06e-e45e-4215-bf0c-16ddd583ddc5", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1349318740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ac5c2a653dae436c97514507939c4e3c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e41c97-4d75-4041-ae71-321e7e9d480b", "external-id": "nsx-vlan-transportzone-483", "segmentation_id": 483, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap34e64073-ea", "ovs_interfaceid": "34e64073-eaa9-42e5-b620-b216a15b02d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 772.238641] env[63345]: DEBUG oslo_vmware.api [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Waiting for the task: (returnval){ [ 772.238641] env[63345]: value = "task-1017031" [ 772.238641] env[63345]: _type = "Task" [ 772.238641] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.238889] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-538285ea-3315-42f1-bcc7-a426125ebad4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.240966] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: bcec23fe-75c7-479e-9210-85ca6781d7e5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:66:81:56', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69e41c97-4d75-4041-ae71-321e7e9d480b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '34e64073-eaa9-42e5-b620-b216a15b02d1', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 772.250484] env[63345]: DEBUG oslo.service.loopingcall [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 772.252849] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bcec23fe-75c7-479e-9210-85ca6781d7e5] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 772.256959] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a275d56c-3e86-4d3f-8d3d-d10247609718 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.277409] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1623c88c-c960-4213-95dd-c038855048b3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.292372] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 772.292372] env[63345]: value = "task-1017032" [ 772.292372] env[63345]: _type = "Task" [ 772.292372] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.292695] env[63345]: DEBUG oslo_vmware.api [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017031, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.295973] env[63345]: DEBUG nova.compute.manager [req-b7a5d456-a20f-419f-90ba-2be7202f0d71 req-762a11d4-dcc9-4418-9bf5-e412decf5f25 service nova] [instance: bcec23fe-75c7-479e-9210-85ca6781d7e5] Received event network-vif-plugged-34e64073-eaa9-42e5-b620-b216a15b02d1 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 772.295973] env[63345]: DEBUG oslo_concurrency.lockutils [req-b7a5d456-a20f-419f-90ba-2be7202f0d71 req-762a11d4-dcc9-4418-9bf5-e412decf5f25 service nova] Acquiring lock "bcec23fe-75c7-479e-9210-85ca6781d7e5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 772.296440] env[63345]: DEBUG oslo_concurrency.lockutils [req-b7a5d456-a20f-419f-90ba-2be7202f0d71 req-762a11d4-dcc9-4418-9bf5-e412decf5f25 service nova] Lock "bcec23fe-75c7-479e-9210-85ca6781d7e5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 772.296440] env[63345]: DEBUG oslo_concurrency.lockutils [req-b7a5d456-a20f-419f-90ba-2be7202f0d71 req-762a11d4-dcc9-4418-9bf5-e412decf5f25 service nova] Lock "bcec23fe-75c7-479e-9210-85ca6781d7e5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 772.296586] env[63345]: DEBUG nova.compute.manager [req-b7a5d456-a20f-419f-90ba-2be7202f0d71 req-762a11d4-dcc9-4418-9bf5-e412decf5f25 service nova] [instance: bcec23fe-75c7-479e-9210-85ca6781d7e5] No waiting events found dispatching network-vif-plugged-34e64073-eaa9-42e5-b620-b216a15b02d1 {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 772.296751] env[63345]: WARNING nova.compute.manager [req-b7a5d456-a20f-419f-90ba-2be7202f0d71 req-762a11d4-dcc9-4418-9bf5-e412decf5f25 service nova] [instance: bcec23fe-75c7-479e-9210-85ca6781d7e5] Received unexpected event network-vif-plugged-34e64073-eaa9-42e5-b620-b216a15b02d1 for instance with vm_state building and task_state spawning. [ 772.296910] env[63345]: DEBUG nova.compute.manager [req-b7a5d456-a20f-419f-90ba-2be7202f0d71 req-762a11d4-dcc9-4418-9bf5-e412decf5f25 service nova] [instance: bcec23fe-75c7-479e-9210-85ca6781d7e5] Received event network-changed-34e64073-eaa9-42e5-b620-b216a15b02d1 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 772.297076] env[63345]: DEBUG nova.compute.manager [req-b7a5d456-a20f-419f-90ba-2be7202f0d71 req-762a11d4-dcc9-4418-9bf5-e412decf5f25 service nova] [instance: bcec23fe-75c7-479e-9210-85ca6781d7e5] Refreshing instance network info cache due to event network-changed-34e64073-eaa9-42e5-b620-b216a15b02d1. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 772.297261] env[63345]: DEBUG oslo_concurrency.lockutils [req-b7a5d456-a20f-419f-90ba-2be7202f0d71 req-762a11d4-dcc9-4418-9bf5-e412decf5f25 service nova] Acquiring lock "refresh_cache-bcec23fe-75c7-479e-9210-85ca6781d7e5" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 772.300018] env[63345]: DEBUG oslo_concurrency.lockutils [req-b7a5d456-a20f-419f-90ba-2be7202f0d71 req-762a11d4-dcc9-4418-9bf5-e412decf5f25 service nova] Acquired lock "refresh_cache-bcec23fe-75c7-479e-9210-85ca6781d7e5" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 772.300018] env[63345]: DEBUG nova.network.neutron [req-b7a5d456-a20f-419f-90ba-2be7202f0d71 req-762a11d4-dcc9-4418-9bf5-e412decf5f25 service nova] [instance: bcec23fe-75c7-479e-9210-85ca6781d7e5] Refreshing network info cache for port 34e64073-eaa9-42e5-b620-b216a15b02d1 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 772.314157] env[63345]: DEBUG nova.compute.manager [req-8bbfd835-dd99-488f-a9e8-65326ea594fc req-6d492a13-8cae-45ee-b75d-31019da7c291 service nova] [instance: 93112cc1-f9a1-4188-9555-bddf483426a1] Detach interface failed, port_id=e5fd2647-d313-4830-9b9d-0722b78abadb, reason: Instance 93112cc1-f9a1-4188-9555-bddf483426a1 could not be found. {{(pid=63345) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 772.314412] env[63345]: DEBUG nova.compute.manager [req-8bbfd835-dd99-488f-a9e8-65326ea594fc req-6d492a13-8cae-45ee-b75d-31019da7c291 service nova] [instance: 7bef089c-e93b-4ba6-a683-4e076489f92a] Received event network-vif-deleted-49047c62-1eed-4563-b10d-31b82cc302ff {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 772.314597] env[63345]: INFO nova.compute.manager [req-8bbfd835-dd99-488f-a9e8-65326ea594fc req-6d492a13-8cae-45ee-b75d-31019da7c291 service nova] [instance: 7bef089c-e93b-4ba6-a683-4e076489f92a] Neutron deleted interface 49047c62-1eed-4563-b10d-31b82cc302ff; detaching it from the instance and deleting it from the info cache [ 772.314775] env[63345]: DEBUG nova.network.neutron [req-8bbfd835-dd99-488f-a9e8-65326ea594fc req-6d492a13-8cae-45ee-b75d-31019da7c291 service nova] [instance: 7bef089c-e93b-4ba6-a683-4e076489f92a] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 772.320531] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017032, 'name': CreateVM_Task} progress is 15%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.487831] env[63345]: INFO nova.compute.manager [-] [instance: 7bef089c-e93b-4ba6-a683-4e076489f92a] Took 2.17 seconds to deallocate network for instance. [ 772.519617] env[63345]: DEBUG oslo_vmware.api [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017030, 'name': ReconfigVM_Task, 'duration_secs': 0.504707} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.519946] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Reconfigured VM instance instance-00000038 to attach disk [datastore1] 3a85df04-3997-48a3-8992-f24fe997b3cc/3a85df04-3997-48a3-8992-f24fe997b3cc.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 772.520619] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f3df53e2-57d7-4ca4-8cd9-b36147dde848 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.526963] env[63345]: DEBUG oslo_vmware.api [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Waiting for the task: (returnval){ [ 772.526963] env[63345]: value = "task-1017033" [ 772.526963] env[63345]: _type = "Task" [ 772.526963] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.534680] env[63345]: DEBUG oslo_vmware.api [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017033, 'name': Rename_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.589275] env[63345]: DEBUG nova.compute.manager [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] [instance: a9b69d13-6330-4f9b-b8e1-1c0017655f9f] Start spawning the instance on the hypervisor. 
{{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 772.618066] env[63345]: DEBUG nova.virt.hardware [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 772.618334] env[63345]: DEBUG nova.virt.hardware [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 772.618493] env[63345]: DEBUG nova.virt.hardware [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 772.618683] env[63345]: DEBUG nova.virt.hardware [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 772.618835] env[63345]: DEBUG nova.virt.hardware [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 772.618987] env[63345]: DEBUG nova.virt.hardware [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 772.619219] env[63345]: DEBUG nova.virt.hardware [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 772.619383] env[63345]: DEBUG nova.virt.hardware [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 772.619556] env[63345]: DEBUG nova.virt.hardware [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 772.620012] env[63345]: DEBUG nova.virt.hardware [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 772.620012] env[63345]: DEBUG nova.virt.hardware [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 772.620885] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6303bd5e-24a9-4093-acb6-541e51f53194 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.629170] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdca94a1-7b32-458e-9969-aa07eb100c7d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.645272] env[63345]: DEBUG nova.scheduler.client.report [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 772.697197] env[63345]: INFO nova.compute.manager [-] [instance: 93112cc1-f9a1-4188-9555-bddf483426a1] Took 1.91 seconds to deallocate network for instance. [ 772.761394] env[63345]: DEBUG oslo_vmware.api [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017031, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070334} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.761755] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: b5173471-3367-42ba-b450-62ad8573f048] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 772.762477] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-333956c5-6203-4023-bee6-79762f67b098 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.785215] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: b5173471-3367-42ba-b450-62ad8573f048] Reconfiguring VM instance instance-00000039 to attach disk [datastore2] b5173471-3367-42ba-b450-62ad8573f048/b5173471-3367-42ba-b450-62ad8573f048.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 772.785472] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8f4b53b8-57e4-4eda-9b1f-d5b4d12527c0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.807788] env[63345]: DEBUG oslo_vmware.api [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Waiting for the task: (returnval){ [ 772.807788] env[63345]: value = "task-1017034" [ 772.807788] env[63345]: _type = "Task" [ 772.807788] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.810833] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017032, 'name': CreateVM_Task, 'duration_secs': 0.356726} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.813628] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bcec23fe-75c7-479e-9210-85ca6781d7e5] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 772.814259] env[63345]: DEBUG oslo_concurrency.lockutils [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 772.814427] env[63345]: DEBUG oslo_concurrency.lockutils [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 772.814810] env[63345]: DEBUG oslo_concurrency.lockutils [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 772.815340] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-52c7a996-508e-4a73-98be-ba396c82aeba {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.820510] env[63345]: DEBUG oslo_vmware.api [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017034, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.821342] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-95e921a9-088b-4103-bd1f-c79c3c2a884d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.825173] env[63345]: DEBUG oslo_vmware.api [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Waiting for the task: (returnval){ [ 772.825173] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52db20d8-a6f9-3e1c-9675-dd53a81de010" [ 772.825173] env[63345]: _type = "Task" [ 772.825173] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.832656] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d397b7c4-6040-4d2a-898a-1f61f8950b75 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.845741] env[63345]: DEBUG oslo_vmware.api [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52db20d8-a6f9-3e1c-9675-dd53a81de010, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.869021] env[63345]: DEBUG nova.compute.manager [req-8bbfd835-dd99-488f-a9e8-65326ea594fc req-6d492a13-8cae-45ee-b75d-31019da7c291 service nova] [instance: 7bef089c-e93b-4ba6-a683-4e076489f92a] Detach interface failed, port_id=49047c62-1eed-4563-b10d-31b82cc302ff, reason: Instance 7bef089c-e93b-4ba6-a683-4e076489f92a could not be found. {{(pid=63345) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 772.869278] env[63345]: DEBUG nova.compute.manager [req-8bbfd835-dd99-488f-a9e8-65326ea594fc req-6d492a13-8cae-45ee-b75d-31019da7c291 service nova] [instance: 93112cc1-f9a1-4188-9555-bddf483426a1] Received event network-vif-deleted-b8d03603-203a-4bdb-ac34-d490f6f611b2 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 772.869458] env[63345]: INFO nova.compute.manager [req-8bbfd835-dd99-488f-a9e8-65326ea594fc req-6d492a13-8cae-45ee-b75d-31019da7c291 service nova] [instance: 93112cc1-f9a1-4188-9555-bddf483426a1] Neutron deleted interface b8d03603-203a-4bdb-ac34-d490f6f611b2; detaching it from the instance and deleting it from the info cache [ 772.869655] env[63345]: DEBUG nova.network.neutron [req-8bbfd835-dd99-488f-a9e8-65326ea594fc req-6d492a13-8cae-45ee-b75d-31019da7c291 service nova] [instance: 93112cc1-f9a1-4188-9555-bddf483426a1] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 773.037277] env[63345]: DEBUG oslo_vmware.api [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017033, 'name': Rename_Task, 'duration_secs': 0.174936} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.037692] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 773.037963] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f00d2c81-8476-4547-a5e3-a13970f25b06 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.044406] env[63345]: DEBUG oslo_vmware.api [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Waiting for the task: (returnval){ [ 773.044406] env[63345]: value = "task-1017035" [ 773.044406] env[63345]: _type = "Task" [ 773.044406] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.053526] env[63345]: DEBUG oslo_vmware.api [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017035, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.068968] env[63345]: INFO nova.compute.manager [None req-2386134b-d3d3-410a-9d58-55819fc91c6b tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] [instance: 7bef089c-e93b-4ba6-a683-4e076489f92a] Took 0.58 seconds to detach 1 volumes for instance. [ 773.073472] env[63345]: DEBUG nova.compute.manager [None req-2386134b-d3d3-410a-9d58-55819fc91c6b tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] [instance: 7bef089c-e93b-4ba6-a683-4e076489f92a] Deleting volume: a6bb0405-c6c3-4109-b8e1-29b372ad3058 {{(pid=63345) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3281}} [ 773.131807] env[63345]: DEBUG nova.network.neutron [req-b7a5d456-a20f-419f-90ba-2be7202f0d71 req-762a11d4-dcc9-4418-9bf5-e412decf5f25 service nova] [instance: bcec23fe-75c7-479e-9210-85ca6781d7e5] Updated VIF entry in instance network info cache for port 34e64073-eaa9-42e5-b620-b216a15b02d1. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 773.131807] env[63345]: DEBUG nova.network.neutron [req-b7a5d456-a20f-419f-90ba-2be7202f0d71 req-762a11d4-dcc9-4418-9bf5-e412decf5f25 service nova] [instance: bcec23fe-75c7-479e-9210-85ca6781d7e5] Updating instance_info_cache with network_info: [{"id": "34e64073-eaa9-42e5-b620-b216a15b02d1", "address": "fa:16:3e:66:81:56", "network": {"id": "403ac06e-e45e-4215-bf0c-16ddd583ddc5", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1349318740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ac5c2a653dae436c97514507939c4e3c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e41c97-4d75-4041-ae71-321e7e9d480b", "external-id": "nsx-vlan-transportzone-483", "segmentation_id": 483, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap34e64073-ea", "ovs_interfaceid": "34e64073-eaa9-42e5-b620-b216a15b02d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 773.152940] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.596s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 773.153241] env[63345]: DEBUG nova.compute.manager [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] [instance: 02eb493e-d1a1-4461-8e3f-e493e96fe058] Start building networks asynchronously for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 773.158890] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a10b6693-c455-4fba-9874-34dcf1ff07be tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.344s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 773.159222] env[63345]: DEBUG nova.objects.instance [None req-a10b6693-c455-4fba-9874-34dcf1ff07be tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Lazy-loading 'resources' on Instance uuid 64fcf837-1d9d-41b1-a2a1-3c16362932cf {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 773.204386] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f59e42eb-b249-4086-aed8-19c9a3367f3e tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 773.213319] env[63345]: DEBUG nova.network.neutron [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] [instance: a9b69d13-6330-4f9b-b8e1-1c0017655f9f] Successfully updated port: 57bdfdfd-6642-43d9-a9b0-52fc8ac0b050 {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 773.319888] env[63345]: DEBUG oslo_vmware.api [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017034, 'name': ReconfigVM_Task, 'duration_secs': 0.272061} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.320205] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: b5173471-3367-42ba-b450-62ad8573f048] Reconfigured VM instance instance-00000039 to attach disk [datastore2] b5173471-3367-42ba-b450-62ad8573f048/b5173471-3367-42ba-b450-62ad8573f048.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 773.320862] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3b880403-696e-47dc-923e-fc82f777a7cf {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.327012] env[63345]: DEBUG oslo_vmware.api [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Waiting for the task: (returnval){ [ 773.327012] env[63345]: value = "task-1017037" [ 773.327012] env[63345]: _type = "Task" [ 773.327012] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.337396] env[63345]: DEBUG oslo_vmware.api [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52db20d8-a6f9-3e1c-9675-dd53a81de010, 'name': SearchDatastore_Task, 'duration_secs': 0.008523} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.340464] env[63345]: DEBUG oslo_concurrency.lockutils [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 773.340741] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: bcec23fe-75c7-479e-9210-85ca6781d7e5] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 773.340978] env[63345]: DEBUG oslo_concurrency.lockutils [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 773.341159] env[63345]: DEBUG oslo_concurrency.lockutils [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 773.341595] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 773.341595] env[63345]: DEBUG oslo_vmware.api [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017037, 'name': Rename_Task} progress is 5%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.341811] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-da9bd236-04b8-49e6-94aa-db4ce22ba991 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.349706] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 773.349898] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 773.350747] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6aac571a-5d9e-41fe-a196-e2a392667210 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.355558] env[63345]: DEBUG oslo_vmware.api [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Waiting for the task: (returnval){ [ 773.355558] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52d522d1-da10-99b3-56c5-683c23a8d04e" [ 773.355558] env[63345]: _type = "Task" [ 773.355558] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.364470] env[63345]: DEBUG oslo_vmware.api [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52d522d1-da10-99b3-56c5-683c23a8d04e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.373235] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9622796c-1c06-41f6-8564-87a0311c75fb {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.382206] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c815128-fd33-4eac-9f55-77c91a5116ec {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.412585] env[63345]: DEBUG nova.compute.manager [req-8bbfd835-dd99-488f-a9e8-65326ea594fc req-6d492a13-8cae-45ee-b75d-31019da7c291 service nova] [instance: 93112cc1-f9a1-4188-9555-bddf483426a1] Detach interface failed, port_id=b8d03603-203a-4bdb-ac34-d490f6f611b2, reason: Instance 93112cc1-f9a1-4188-9555-bddf483426a1 could not be found. {{(pid=63345) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 773.554954] env[63345]: DEBUG oslo_vmware.api [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017035, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.621758] env[63345]: DEBUG oslo_concurrency.lockutils [None req-2386134b-d3d3-410a-9d58-55819fc91c6b tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 773.634721] env[63345]: DEBUG oslo_concurrency.lockutils [req-b7a5d456-a20f-419f-90ba-2be7202f0d71 req-762a11d4-dcc9-4418-9bf5-e412decf5f25 service nova] Releasing lock "refresh_cache-bcec23fe-75c7-479e-9210-85ca6781d7e5" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 773.662703] env[63345]: DEBUG nova.compute.utils [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 773.664280] env[63345]: DEBUG nova.compute.manager [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] [instance: 02eb493e-d1a1-4461-8e3f-e493e96fe058] Allocating IP information in the background. {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 773.664897] env[63345]: DEBUG nova.network.neutron [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] [instance: 02eb493e-d1a1-4461-8e3f-e493e96fe058] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 773.715573] env[63345]: DEBUG nova.policy [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '40f776b6c1024b229e775f7d480b0bc7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '67787a91980240fdadc8d33bb30e682a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 773.717454] env[63345]: DEBUG oslo_concurrency.lockutils [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Acquiring lock "refresh_cache-a9b69d13-6330-4f9b-b8e1-1c0017655f9f" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 773.717589] env[63345]: DEBUG oslo_concurrency.lockutils [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Acquired lock "refresh_cache-a9b69d13-6330-4f9b-b8e1-1c0017655f9f" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 773.718077] env[63345]: DEBUG nova.network.neutron [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 
tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] [instance: a9b69d13-6330-4f9b-b8e1-1c0017655f9f] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 773.839968] env[63345]: DEBUG oslo_vmware.api [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017037, 'name': Rename_Task, 'duration_secs': 0.158349} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.845855] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: b5173471-3367-42ba-b450-62ad8573f048] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 773.845855] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7de95f83-c510-4976-ac0b-028dece69c5f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.849868] env[63345]: DEBUG oslo_vmware.api [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Waiting for the task: (returnval){ [ 773.849868] env[63345]: value = "task-1017038" [ 773.849868] env[63345]: _type = "Task" [ 773.849868] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.863219] env[63345]: DEBUG oslo_vmware.api [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017038, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.868736] env[63345]: DEBUG oslo_vmware.api [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52d522d1-da10-99b3-56c5-683c23a8d04e, 'name': SearchDatastore_Task, 'duration_secs': 0.008647} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.872134] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-195e8b67-3893-412b-8734-c79fbd5e5499 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.877287] env[63345]: DEBUG oslo_vmware.api [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Waiting for the task: (returnval){ [ 773.877287] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]529377d6-b586-c827-fae3-a884ddd70968" [ 773.877287] env[63345]: _type = "Task" [ 773.877287] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.886273] env[63345]: DEBUG oslo_vmware.api [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]529377d6-b586-c827-fae3-a884ddd70968, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.036173] env[63345]: DEBUG nova.network.neutron [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] [instance: 02eb493e-d1a1-4461-8e3f-e493e96fe058] Successfully created port: 9674730a-bad7-4d14-8348-d1d12f0c1c89 {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 774.059961] env[63345]: DEBUG oslo_vmware.api [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017035, 'name': PowerOnVM_Task, 'duration_secs': 0.517796} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.060760] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 774.061102] env[63345]: INFO nova.compute.manager [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Took 9.37 seconds to spawn the instance on the hypervisor. [ 774.061391] env[63345]: DEBUG nova.compute.manager [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 774.062319] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c6de052-81f8-444b-a9ad-22bd208a3789 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.172788] env[63345]: DEBUG nova.compute.manager [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] [instance: 02eb493e-d1a1-4461-8e3f-e493e96fe058] Start building block device mappings for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 774.207084] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10276b1a-5eb4-4f03-a4bf-4f44f5e8c1b4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.215691] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aebc2377-a857-4c5c-b693-74ef45e479d0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.252706] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7f59b77-dd93-43fc-8c12-4429c550c3e0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.260952] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ced1e4b3-7f12-4109-bd99-07bf27011169 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.276273] env[63345]: DEBUG nova.compute.provider_tree [None req-a10b6693-c455-4fba-9874-34dcf1ff07be tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 774.293564] env[63345]: DEBUG nova.network.neutron [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] [instance: a9b69d13-6330-4f9b-b8e1-1c0017655f9f] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 774.332464] env[63345]: DEBUG nova.compute.manager [req-fda80f8d-633d-42ae-8ad1-93dcb88fba58 req-da1b3420-9bab-4010-86d1-d0ed1198affe service nova] [instance: a9b69d13-6330-4f9b-b8e1-1c0017655f9f] Received event network-vif-plugged-57bdfdfd-6642-43d9-a9b0-52fc8ac0b050 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 774.332696] env[63345]: DEBUG oslo_concurrency.lockutils [req-fda80f8d-633d-42ae-8ad1-93dcb88fba58 req-da1b3420-9bab-4010-86d1-d0ed1198affe service nova] Acquiring lock "a9b69d13-6330-4f9b-b8e1-1c0017655f9f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 774.332937] env[63345]: DEBUG oslo_concurrency.lockutils [req-fda80f8d-633d-42ae-8ad1-93dcb88fba58 req-da1b3420-9bab-4010-86d1-d0ed1198affe service nova] Lock "a9b69d13-6330-4f9b-b8e1-1c0017655f9f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 774.333132] env[63345]: DEBUG oslo_concurrency.lockutils [req-fda80f8d-633d-42ae-8ad1-93dcb88fba58 req-da1b3420-9bab-4010-86d1-d0ed1198affe service nova] Lock "a9b69d13-6330-4f9b-b8e1-1c0017655f9f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 774.333312] env[63345]: DEBUG nova.compute.manager [req-fda80f8d-633d-42ae-8ad1-93dcb88fba58 req-da1b3420-9bab-4010-86d1-d0ed1198affe service nova] [instance: a9b69d13-6330-4f9b-b8e1-1c0017655f9f] No waiting events found dispatching network-vif-plugged-57bdfdfd-6642-43d9-a9b0-52fc8ac0b050 {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 774.333483] env[63345]: WARNING nova.compute.manager [req-fda80f8d-633d-42ae-8ad1-93dcb88fba58 req-da1b3420-9bab-4010-86d1-d0ed1198affe service nova] [instance: a9b69d13-6330-4f9b-b8e1-1c0017655f9f] Received unexpected event network-vif-plugged-57bdfdfd-6642-43d9-a9b0-52fc8ac0b050 for instance with vm_state building and task_state spawning. [ 774.334432] env[63345]: DEBUG nova.compute.manager [req-fda80f8d-633d-42ae-8ad1-93dcb88fba58 req-da1b3420-9bab-4010-86d1-d0ed1198affe service nova] [instance: a9b69d13-6330-4f9b-b8e1-1c0017655f9f] Received event network-changed-57bdfdfd-6642-43d9-a9b0-52fc8ac0b050 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 774.334432] env[63345]: DEBUG nova.compute.manager [req-fda80f8d-633d-42ae-8ad1-93dcb88fba58 req-da1b3420-9bab-4010-86d1-d0ed1198affe service nova] [instance: a9b69d13-6330-4f9b-b8e1-1c0017655f9f] Refreshing instance network info cache due to event network-changed-57bdfdfd-6642-43d9-a9b0-52fc8ac0b050. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 774.334725] env[63345]: DEBUG oslo_concurrency.lockutils [req-fda80f8d-633d-42ae-8ad1-93dcb88fba58 req-da1b3420-9bab-4010-86d1-d0ed1198affe service nova] Acquiring lock "refresh_cache-a9b69d13-6330-4f9b-b8e1-1c0017655f9f" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 774.361830] env[63345]: DEBUG oslo_vmware.api [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017038, 'name': PowerOnVM_Task, 'duration_secs': 0.454446} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.362083] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: b5173471-3367-42ba-b450-62ad8573f048] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 774.362687] env[63345]: INFO nova.compute.manager [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: b5173471-3367-42ba-b450-62ad8573f048] Took 7.05 seconds to spawn the instance on the hypervisor. [ 774.362687] env[63345]: DEBUG nova.compute.manager [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: b5173471-3367-42ba-b450-62ad8573f048] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 774.363663] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbe3ecba-eb79-4d4c-a056-7194eca6752f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.391338] env[63345]: DEBUG oslo_vmware.api [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]529377d6-b586-c827-fae3-a884ddd70968, 'name': SearchDatastore_Task, 'duration_secs': 0.01898} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.392106] env[63345]: DEBUG oslo_concurrency.lockutils [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 774.392106] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] bcec23fe-75c7-479e-9210-85ca6781d7e5/bcec23fe-75c7-479e-9210-85ca6781d7e5.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 774.392318] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b25602ce-537c-4ba2-a7e2-51a8d2504cdf {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.399107] env[63345]: DEBUG oslo_vmware.api [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Waiting for the task: (returnval){ [ 774.399107] env[63345]: value = "task-1017039" [ 774.399107] env[63345]: _type = "Task" [ 774.399107] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.408724] env[63345]: DEBUG oslo_vmware.api [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1017039, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.474874] env[63345]: DEBUG nova.network.neutron [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] [instance: a9b69d13-6330-4f9b-b8e1-1c0017655f9f] Updating instance_info_cache with network_info: [{"id": "57bdfdfd-6642-43d9-a9b0-52fc8ac0b050", "address": "fa:16:3e:e3:f6:87", "network": {"id": "0514ca1d-be21-44fc-ba2c-7282fb9c2304", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-344341582-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9a46ce98d4b4b289b27f2f5b9a8a446", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35ac9709-fd8b-4630-897a-68ed629d1b11", "external-id": "nsx-vlan-transportzone-284", "segmentation_id": 284, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57bdfdfd-66", "ovs_interfaceid": "57bdfdfd-6642-43d9-a9b0-52fc8ac0b050", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 774.588087] env[63345]: INFO nova.compute.manager [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Took 41.77 seconds to build instance. [ 774.779268] env[63345]: DEBUG nova.scheduler.client.report [None req-a10b6693-c455-4fba-9874-34dcf1ff07be tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 774.883534] env[63345]: INFO nova.compute.manager [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: b5173471-3367-42ba-b450-62ad8573f048] Took 40.91 seconds to build instance. [ 774.910983] env[63345]: DEBUG oslo_vmware.api [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1017039, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.977787] env[63345]: DEBUG oslo_concurrency.lockutils [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Releasing lock "refresh_cache-a9b69d13-6330-4f9b-b8e1-1c0017655f9f" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 774.977985] env[63345]: DEBUG nova.compute.manager [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] [instance: a9b69d13-6330-4f9b-b8e1-1c0017655f9f] Instance network_info: |[{"id": "57bdfdfd-6642-43d9-a9b0-52fc8ac0b050", "address": "fa:16:3e:e3:f6:87", "network": {"id": "0514ca1d-be21-44fc-ba2c-7282fb9c2304", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-344341582-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9a46ce98d4b4b289b27f2f5b9a8a446", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35ac9709-fd8b-4630-897a-68ed629d1b11", "external-id": "nsx-vlan-transportzone-284", "segmentation_id": 284, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57bdfdfd-66", "ovs_interfaceid": "57bdfdfd-6642-43d9-a9b0-52fc8ac0b050", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 774.978347] env[63345]: DEBUG oslo_concurrency.lockutils [req-fda80f8d-633d-42ae-8ad1-93dcb88fba58 req-da1b3420-9bab-4010-86d1-d0ed1198affe service nova] Acquired lock "refresh_cache-a9b69d13-6330-4f9b-b8e1-1c0017655f9f" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 774.978563] env[63345]: DEBUG nova.network.neutron [req-fda80f8d-633d-42ae-8ad1-93dcb88fba58 req-da1b3420-9bab-4010-86d1-d0ed1198affe service nova] [instance: a9b69d13-6330-4f9b-b8e1-1c0017655f9f] Refreshing network info cache for port 57bdfdfd-6642-43d9-a9b0-52fc8ac0b050 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 774.979841] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] [instance: a9b69d13-6330-4f9b-b8e1-1c0017655f9f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e3:f6:87', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '35ac9709-fd8b-4630-897a-68ed629d1b11', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '57bdfdfd-6642-43d9-a9b0-52fc8ac0b050', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 774.988335] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-850c47e9-4e83-460e-bb4e-9175ff04458b 
tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Creating folder: Project (c9a46ce98d4b4b289b27f2f5b9a8a446). Parent ref: group-v225918. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 774.988948] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5f719fb2-9ba7-4a63-a36b-9531322bc29e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.000088] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Created folder: Project (c9a46ce98d4b4b289b27f2f5b9a8a446) in parent group-v225918. [ 775.000340] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Creating folder: Instances. Parent ref: group-v226029. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 775.001238] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e0eb80e2-89b8-4896-b0d8-23eb73f4a55f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.011965] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Created folder: Instances in parent group-v226029. [ 775.012260] env[63345]: DEBUG oslo.service.loopingcall [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 775.012460] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a9b69d13-6330-4f9b-b8e1-1c0017655f9f] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 775.013030] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1fe78f05-3db9-42b9-8c7e-6a35949ce6af {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.033240] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 775.033240] env[63345]: value = "task-1017042" [ 775.033240] env[63345]: _type = "Task" [ 775.033240] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.043171] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017042, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.090073] env[63345]: DEBUG oslo_concurrency.lockutils [None req-774a47dd-b473-4d15-9ab9-a251b50eef81 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Lock "3a85df04-3997-48a3-8992-f24fe997b3cc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 119.107s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 775.179759] env[63345]: DEBUG nova.compute.manager [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] [instance: 02eb493e-d1a1-4461-8e3f-e493e96fe058] Start spawning the instance on the hypervisor. {{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 775.213018] env[63345]: DEBUG nova.virt.hardware [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 775.213378] env[63345]: DEBUG nova.virt.hardware [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 775.213563] env[63345]: DEBUG nova.virt.hardware [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 775.213816] env[63345]: DEBUG nova.virt.hardware [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 775.214048] env[63345]: DEBUG nova.virt.hardware [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 775.214270] env[63345]: DEBUG nova.virt.hardware [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 775.214528] env[63345]: DEBUG nova.virt.hardware [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 775.214705] env[63345]: DEBUG nova.virt.hardware [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 775.214883] env[63345]: DEBUG nova.virt.hardware [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 775.215163] env[63345]: DEBUG nova.virt.hardware [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 775.215310] env[63345]: DEBUG nova.virt.hardware [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 775.216620] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c09dd880-a42a-40a5-9e28-6d0d88a4d998 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.227584] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79208fc3-dd58-4212-baed-af33bbd9f48a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.284978] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a10b6693-c455-4fba-9874-34dcf1ff07be tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.126s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 775.289083] env[63345]: DEBUG oslo_concurrency.lockutils [None req-de07fc32-5e9e-4308-a572-a207df5d5aa9 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.428s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 775.289463] env[63345]: DEBUG nova.objects.instance [None req-de07fc32-5e9e-4308-a572-a207df5d5aa9 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Lazy-loading 'resources' on Instance uuid 3101726f-5b14-417e-bcf8-390ce1f9b467 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 
775.312267] env[63345]: INFO nova.scheduler.client.report [None req-a10b6693-c455-4fba-9874-34dcf1ff07be tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Deleted allocations for instance 64fcf837-1d9d-41b1-a2a1-3c16362932cf [ 775.385317] env[63345]: DEBUG oslo_concurrency.lockutils [None req-e75905f3-55af-4c3c-a98f-c1470127c787 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Lock "b5173471-3367-42ba-b450-62ad8573f048" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 118.705s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 775.410964] env[63345]: DEBUG oslo_vmware.api [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1017039, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.881905} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.411333] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] bcec23fe-75c7-479e-9210-85ca6781d7e5/bcec23fe-75c7-479e-9210-85ca6781d7e5.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 775.411616] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: bcec23fe-75c7-479e-9210-85ca6781d7e5] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 775.411955] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6ee86767-0509-414f-a3b2-c3b0efb8d59a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.418820] env[63345]: DEBUG oslo_vmware.api [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Waiting for the task: (returnval){ [ 775.418820] env[63345]: value = "task-1017043" [ 775.418820] env[63345]: _type = "Task" [ 775.418820] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.426742] env[63345]: DEBUG oslo_vmware.api [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1017043, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.543193] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017042, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.594687] env[63345]: DEBUG nova.compute.manager [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 775.821984] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a10b6693-c455-4fba-9874-34dcf1ff07be tempest-ServerMetadataTestJSON-1127927341 tempest-ServerMetadataTestJSON-1127927341-project-member] Lock "64fcf837-1d9d-41b1-a2a1-3c16362932cf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.537s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 775.908241] env[63345]: DEBUG nova.network.neutron [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] [instance: 02eb493e-d1a1-4461-8e3f-e493e96fe058] Successfully updated port: 9674730a-bad7-4d14-8348-d1d12f0c1c89 {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 775.932646] env[63345]: DEBUG oslo_vmware.api [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1017043, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.085066} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.935515] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: bcec23fe-75c7-479e-9210-85ca6781d7e5] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 775.939149] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3760ba92-5bbe-4ac7-8779-247e18bf2e2b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.962277] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: bcec23fe-75c7-479e-9210-85ca6781d7e5] Reconfiguring VM instance instance-0000003a to attach disk [datastore2] bcec23fe-75c7-479e-9210-85ca6781d7e5/bcec23fe-75c7-479e-9210-85ca6781d7e5.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 775.964968] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e85e4f8f-f501-4a25-8633-e61b424e7e78 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.981927] env[63345]: DEBUG nova.network.neutron [req-fda80f8d-633d-42ae-8ad1-93dcb88fba58 req-da1b3420-9bab-4010-86d1-d0ed1198affe service nova] [instance: a9b69d13-6330-4f9b-b8e1-1c0017655f9f] Updated VIF entry in instance network info cache for port 57bdfdfd-6642-43d9-a9b0-52fc8ac0b050. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 775.982939] env[63345]: DEBUG nova.network.neutron [req-fda80f8d-633d-42ae-8ad1-93dcb88fba58 req-da1b3420-9bab-4010-86d1-d0ed1198affe service nova] [instance: a9b69d13-6330-4f9b-b8e1-1c0017655f9f] Updating instance_info_cache with network_info: [{"id": "57bdfdfd-6642-43d9-a9b0-52fc8ac0b050", "address": "fa:16:3e:e3:f6:87", "network": {"id": "0514ca1d-be21-44fc-ba2c-7282fb9c2304", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-344341582-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9a46ce98d4b4b289b27f2f5b9a8a446", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35ac9709-fd8b-4630-897a-68ed629d1b11", "external-id": "nsx-vlan-transportzone-284", "segmentation_id": 284, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57bdfdfd-66", "ovs_interfaceid": "57bdfdfd-6642-43d9-a9b0-52fc8ac0b050", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 775.989674] env[63345]: DEBUG oslo_vmware.api [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Waiting for the task: (returnval){ [ 775.989674] env[63345]: value = "task-1017044" [ 775.989674] env[63345]: _type = "Task" [ 775.989674] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.002675] env[63345]: DEBUG oslo_vmware.api [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1017044, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.043626] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017042, 'name': CreateVM_Task, 'duration_secs': 0.605605} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.045994] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a9b69d13-6330-4f9b-b8e1-1c0017655f9f] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 776.046864] env[63345]: DEBUG oslo_concurrency.lockutils [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 776.047053] env[63345]: DEBUG oslo_concurrency.lockutils [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 776.047373] env[63345]: DEBUG oslo_concurrency.lockutils [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 776.047627] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-033e83b4-42d7-4126-b342-4029d2257bd9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.051913] env[63345]: DEBUG oslo_vmware.api [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Waiting for the task: (returnval){ [ 776.051913] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]520b213d-65da-111b-0fc7-f268a7d9fa1f" [ 776.051913] env[63345]: _type = "Task" [ 776.051913] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.059845] env[63345]: DEBUG oslo_vmware.api [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]520b213d-65da-111b-0fc7-f268a7d9fa1f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.116187] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 776.264326] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8228f2f-769a-4188-8436-b3e02c544a70 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.273577] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50b15a55-7b18-4497-b63c-67f0f6645d85 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.312727] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-173b71db-5712-4a13-bc34-6f571fc745f6 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.320343] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Acquiring lock "a85688b0-d68f-4370-bd95-dc9fb1d2c26a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 776.320664] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Lock "a85688b0-d68f-4370-bd95-dc9fb1d2c26a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 776.331244] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c16eb12-1836-4a20-92d1-c98e3924a961 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.347567] env[63345]: DEBUG nova.compute.provider_tree [None req-de07fc32-5e9e-4308-a572-a207df5d5aa9 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 776.409901] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Acquiring lock "refresh_cache-02eb493e-d1a1-4461-8e3f-e493e96fe058" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 776.410124] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Acquired lock 
"refresh_cache-02eb493e-d1a1-4461-8e3f-e493e96fe058" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 776.410321] env[63345]: DEBUG nova.network.neutron [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] [instance: 02eb493e-d1a1-4461-8e3f-e493e96fe058] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 776.463760] env[63345]: DEBUG nova.compute.manager [req-45d71a5f-c1ac-460b-a914-d557700953ba req-67bc16cf-19ba-4f41-9d2d-db9b8b392f0d service nova] [instance: 02eb493e-d1a1-4461-8e3f-e493e96fe058] Received event network-vif-plugged-9674730a-bad7-4d14-8348-d1d12f0c1c89 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 776.463760] env[63345]: DEBUG oslo_concurrency.lockutils [req-45d71a5f-c1ac-460b-a914-d557700953ba req-67bc16cf-19ba-4f41-9d2d-db9b8b392f0d service nova] Acquiring lock "02eb493e-d1a1-4461-8e3f-e493e96fe058-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 776.463760] env[63345]: DEBUG oslo_concurrency.lockutils [req-45d71a5f-c1ac-460b-a914-d557700953ba req-67bc16cf-19ba-4f41-9d2d-db9b8b392f0d service nova] Lock "02eb493e-d1a1-4461-8e3f-e493e96fe058-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 776.463760] env[63345]: DEBUG oslo_concurrency.lockutils [req-45d71a5f-c1ac-460b-a914-d557700953ba req-67bc16cf-19ba-4f41-9d2d-db9b8b392f0d service nova] Lock "02eb493e-d1a1-4461-8e3f-e493e96fe058-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 776.463760] env[63345]: DEBUG nova.compute.manager [req-45d71a5f-c1ac-460b-a914-d557700953ba req-67bc16cf-19ba-4f41-9d2d-db9b8b392f0d service nova] [instance: 02eb493e-d1a1-4461-8e3f-e493e96fe058] No waiting events found dispatching network-vif-plugged-9674730a-bad7-4d14-8348-d1d12f0c1c89 {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 776.463760] env[63345]: WARNING nova.compute.manager [req-45d71a5f-c1ac-460b-a914-d557700953ba req-67bc16cf-19ba-4f41-9d2d-db9b8b392f0d service nova] [instance: 02eb493e-d1a1-4461-8e3f-e493e96fe058] Received unexpected event network-vif-plugged-9674730a-bad7-4d14-8348-d1d12f0c1c89 for instance with vm_state building and task_state spawning. [ 776.463760] env[63345]: DEBUG nova.compute.manager [req-45d71a5f-c1ac-460b-a914-d557700953ba req-67bc16cf-19ba-4f41-9d2d-db9b8b392f0d service nova] [instance: 02eb493e-d1a1-4461-8e3f-e493e96fe058] Received event network-changed-9674730a-bad7-4d14-8348-d1d12f0c1c89 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 776.463760] env[63345]: DEBUG nova.compute.manager [req-45d71a5f-c1ac-460b-a914-d557700953ba req-67bc16cf-19ba-4f41-9d2d-db9b8b392f0d service nova] [instance: 02eb493e-d1a1-4461-8e3f-e493e96fe058] Refreshing instance network info cache due to event network-changed-9674730a-bad7-4d14-8348-d1d12f0c1c89. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 776.464466] env[63345]: DEBUG oslo_concurrency.lockutils [req-45d71a5f-c1ac-460b-a914-d557700953ba req-67bc16cf-19ba-4f41-9d2d-db9b8b392f0d service nova] Acquiring lock "refresh_cache-02eb493e-d1a1-4461-8e3f-e493e96fe058" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 776.486852] env[63345]: DEBUG oslo_concurrency.lockutils [req-fda80f8d-633d-42ae-8ad1-93dcb88fba58 req-da1b3420-9bab-4010-86d1-d0ed1198affe service nova] Releasing lock "refresh_cache-a9b69d13-6330-4f9b-b8e1-1c0017655f9f" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 776.502445] env[63345]: DEBUG oslo_vmware.api [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1017044, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.563079] env[63345]: DEBUG oslo_vmware.api [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]520b213d-65da-111b-0fc7-f268a7d9fa1f, 'name': SearchDatastore_Task, 'duration_secs': 0.012452} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.563435] env[63345]: DEBUG oslo_concurrency.lockutils [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 776.563679] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] [instance: a9b69d13-6330-4f9b-b8e1-1c0017655f9f] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 776.563918] env[63345]: DEBUG oslo_concurrency.lockutils [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 776.564083] env[63345]: DEBUG oslo_concurrency.lockutils [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 776.564273] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 
tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 776.564534] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7106c36e-15a0-41e9-9195-7497d2578acb {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.575362] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 776.575565] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 776.576344] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-79c4b37a-8270-4bce-aa61-e150f56f1639 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.581781] env[63345]: DEBUG oslo_vmware.api [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Waiting for the task: (returnval){ [ 776.581781] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]528a246c-50db-916a-e9c0-5242276c655f" [ 776.581781] env[63345]: _type = "Task" [ 776.581781] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.589725] env[63345]: DEBUG oslo_vmware.api [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]528a246c-50db-916a-e9c0-5242276c655f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.823687] env[63345]: DEBUG nova.compute.manager [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: a85688b0-d68f-4370-bd95-dc9fb1d2c26a] Starting instance... 
{{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 776.851229] env[63345]: DEBUG nova.scheduler.client.report [None req-de07fc32-5e9e-4308-a572-a207df5d5aa9 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 776.952011] env[63345]: DEBUG nova.network.neutron [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] [instance: 02eb493e-d1a1-4461-8e3f-e493e96fe058] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 777.000564] env[63345]: DEBUG oslo_vmware.api [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1017044, 'name': ReconfigVM_Task, 'duration_secs': 0.590394} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.003081] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: bcec23fe-75c7-479e-9210-85ca6781d7e5] Reconfigured VM instance instance-0000003a to attach disk [datastore2] bcec23fe-75c7-479e-9210-85ca6781d7e5/bcec23fe-75c7-479e-9210-85ca6781d7e5.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 777.004046] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bbf7c563-ef1d-423c-8210-5ecc082dc534 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.011152] env[63345]: DEBUG oslo_vmware.api [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Waiting for the task: (returnval){ [ 777.011152] env[63345]: value = "task-1017045" [ 777.011152] env[63345]: _type = "Task" [ 777.011152] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.019372] env[63345]: DEBUG oslo_vmware.api [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1017045, 'name': Rename_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.092543] env[63345]: DEBUG oslo_vmware.api [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]528a246c-50db-916a-e9c0-5242276c655f, 'name': SearchDatastore_Task, 'duration_secs': 0.01931} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.093402] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e2d8443e-e0fc-493e-a078-bc4fe3b55ce5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.099522] env[63345]: DEBUG oslo_vmware.api [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Waiting for the task: (returnval){ [ 777.099522] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52942477-1ac6-e794-4e6b-7a96f99869a8" [ 777.099522] env[63345]: _type = "Task" [ 777.099522] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.110725] env[63345]: DEBUG oslo_vmware.api [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52942477-1ac6-e794-4e6b-7a96f99869a8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.116252] env[63345]: DEBUG nova.network.neutron [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] [instance: 02eb493e-d1a1-4461-8e3f-e493e96fe058] Updating instance_info_cache with network_info: [{"id": "9674730a-bad7-4d14-8348-d1d12f0c1c89", "address": "fa:16:3e:4f:0f:af", "network": {"id": "e8cc2c0b-d6a1-4b73-b7fc-3623527c5707", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1247474236-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "67787a91980240fdadc8d33bb30e682a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57d4be17-536f-4a81-bea9-6547bd50f4a3", "external-id": "nsx-vlan-transportzone-163", "segmentation_id": 163, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9674730a-ba", "ovs_interfaceid": "9674730a-bad7-4d14-8348-d1d12f0c1c89", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 777.354027] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 
tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 777.357993] env[63345]: DEBUG oslo_concurrency.lockutils [None req-de07fc32-5e9e-4308-a572-a207df5d5aa9 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.069s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 777.360733] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.054s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 777.362077] env[63345]: INFO nova.compute.claims [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 777.382362] env[63345]: INFO nova.scheduler.client.report [None req-de07fc32-5e9e-4308-a572-a207df5d5aa9 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Deleted allocations for instance 3101726f-5b14-417e-bcf8-390ce1f9b467 [ 777.523298] env[63345]: DEBUG oslo_vmware.api [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1017045, 'name': Rename_Task, 'duration_secs': 0.18014} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.523365] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: bcec23fe-75c7-479e-9210-85ca6781d7e5] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 777.523592] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7611834e-bdd4-456e-b6da-f26b1508ce4f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.531095] env[63345]: DEBUG oslo_vmware.api [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Waiting for the task: (returnval){ [ 777.531095] env[63345]: value = "task-1017046" [ 777.531095] env[63345]: _type = "Task" [ 777.531095] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.540250] env[63345]: DEBUG oslo_vmware.api [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1017046, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.610540] env[63345]: DEBUG oslo_vmware.api [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52942477-1ac6-e794-4e6b-7a96f99869a8, 'name': SearchDatastore_Task, 'duration_secs': 0.011801} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.610919] env[63345]: DEBUG oslo_concurrency.lockutils [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 777.611265] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] a9b69d13-6330-4f9b-b8e1-1c0017655f9f/a9b69d13-6330-4f9b-b8e1-1c0017655f9f.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 777.611629] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9eab0ff9-f2e1-46f1-8d63-2d7dde59b33e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.620911] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Releasing lock "refresh_cache-02eb493e-d1a1-4461-8e3f-e493e96fe058" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 777.621127] env[63345]: DEBUG nova.compute.manager [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] [instance: 02eb493e-d1a1-4461-8e3f-e493e96fe058] Instance network_info: |[{"id": "9674730a-bad7-4d14-8348-d1d12f0c1c89", "address": "fa:16:3e:4f:0f:af", "network": {"id": "e8cc2c0b-d6a1-4b73-b7fc-3623527c5707", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1247474236-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "67787a91980240fdadc8d33bb30e682a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, 
"type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57d4be17-536f-4a81-bea9-6547bd50f4a3", "external-id": "nsx-vlan-transportzone-163", "segmentation_id": 163, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9674730a-ba", "ovs_interfaceid": "9674730a-bad7-4d14-8348-d1d12f0c1c89", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 777.622173] env[63345]: DEBUG oslo_vmware.api [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Waiting for the task: (returnval){ [ 777.622173] env[63345]: value = "task-1017047" [ 777.622173] env[63345]: _type = "Task" [ 777.622173] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.622444] env[63345]: DEBUG oslo_concurrency.lockutils [req-45d71a5f-c1ac-460b-a914-d557700953ba req-67bc16cf-19ba-4f41-9d2d-db9b8b392f0d service nova] Acquired lock "refresh_cache-02eb493e-d1a1-4461-8e3f-e493e96fe058" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 777.622635] env[63345]: DEBUG nova.network.neutron [req-45d71a5f-c1ac-460b-a914-d557700953ba req-67bc16cf-19ba-4f41-9d2d-db9b8b392f0d service nova] [instance: 02eb493e-d1a1-4461-8e3f-e493e96fe058] Refreshing network info cache for port 9674730a-bad7-4d14-8348-d1d12f0c1c89 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 777.623931] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] [instance: 02eb493e-d1a1-4461-8e3f-e493e96fe058] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4f:0f:af', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '57d4be17-536f-4a81-bea9-6547bd50f4a3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9674730a-bad7-4d14-8348-d1d12f0c1c89', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 777.632958] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Creating folder: Project (67787a91980240fdadc8d33bb30e682a). Parent ref: group-v225918. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 777.636473] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3bd55a11-fb14-45eb-b419-6b4f1c53a7d0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.647477] env[63345]: DEBUG oslo_vmware.api [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Task: {'id': task-1017047, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.650379] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Created folder: Project (67787a91980240fdadc8d33bb30e682a) in parent group-v225918. [ 777.652673] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Creating folder: Instances. Parent ref: group-v226032. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 777.652673] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c6eff9e7-5900-47a7-8166-2d3a0ccedf38 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.659538] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Created folder: Instances in parent group-v226032. [ 777.660868] env[63345]: DEBUG oslo.service.loopingcall [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 777.660868] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 02eb493e-d1a1-4461-8e3f-e493e96fe058] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 777.660868] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d8883cc7-5935-4909-a1cd-04be5b43e8d3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.682193] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 777.682193] env[63345]: value = "task-1017050" [ 777.682193] env[63345]: _type = "Task" [ 777.682193] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.690539] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017050, 'name': CreateVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.892279] env[63345]: DEBUG oslo_concurrency.lockutils [None req-de07fc32-5e9e-4308-a572-a207df5d5aa9 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Lock "3101726f-5b14-417e-bcf8-390ce1f9b467" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.904s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 777.958711] env[63345]: DEBUG nova.network.neutron [req-45d71a5f-c1ac-460b-a914-d557700953ba req-67bc16cf-19ba-4f41-9d2d-db9b8b392f0d service nova] [instance: 02eb493e-d1a1-4461-8e3f-e493e96fe058] Updated VIF entry in instance network info cache for port 9674730a-bad7-4d14-8348-d1d12f0c1c89. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 777.958711] env[63345]: DEBUG nova.network.neutron [req-45d71a5f-c1ac-460b-a914-d557700953ba req-67bc16cf-19ba-4f41-9d2d-db9b8b392f0d service nova] [instance: 02eb493e-d1a1-4461-8e3f-e493e96fe058] Updating instance_info_cache with network_info: [{"id": "9674730a-bad7-4d14-8348-d1d12f0c1c89", "address": "fa:16:3e:4f:0f:af", "network": {"id": "e8cc2c0b-d6a1-4b73-b7fc-3623527c5707", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1247474236-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "67787a91980240fdadc8d33bb30e682a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57d4be17-536f-4a81-bea9-6547bd50f4a3", "external-id": "nsx-vlan-transportzone-163", "segmentation_id": 163, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9674730a-ba", "ovs_interfaceid": "9674730a-bad7-4d14-8348-d1d12f0c1c89", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 778.041662] env[63345]: DEBUG oslo_vmware.api [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1017046, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.144386] env[63345]: DEBUG oslo_vmware.api [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Task: {'id': task-1017047, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.193341] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017050, 'name': CreateVM_Task, 'duration_secs': 0.399124} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.193935] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 02eb493e-d1a1-4461-8e3f-e493e96fe058] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 778.194809] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 778.195128] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 778.195543] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 778.195933] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-53e6dddc-2e18-420c-8f43-fd19a74c63f3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.203126] env[63345]: DEBUG oslo_vmware.api [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Waiting for the task: (returnval){ [ 778.203126] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52e07610-a1d9-6f6a-f265-02681c8dd2c1" [ 778.203126] env[63345]: _type = "Task" [ 778.203126] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.212459] env[63345]: DEBUG oslo_vmware.api [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52e07610-a1d9-6f6a-f265-02681c8dd2c1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.299990] env[63345]: INFO nova.compute.manager [None req-a99cbd47-599e-4848-9eb3-db28155cf89b tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Rebuilding instance [ 778.350794] env[63345]: DEBUG nova.compute.manager [None req-a99cbd47-599e-4848-9eb3-db28155cf89b tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 778.351748] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c6c3f82-500a-453a-a9a9-b3c468b695d8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.460822] env[63345]: DEBUG oslo_concurrency.lockutils [req-45d71a5f-c1ac-460b-a914-d557700953ba req-67bc16cf-19ba-4f41-9d2d-db9b8b392f0d service nova] Releasing lock "refresh_cache-02eb493e-d1a1-4461-8e3f-e493e96fe058" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 778.544523] env[63345]: DEBUG oslo_vmware.api [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1017046, 'name': PowerOnVM_Task, 'duration_secs': 0.624403} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.544817] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: bcec23fe-75c7-479e-9210-85ca6781d7e5] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 778.545081] env[63345]: INFO nova.compute.manager [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: bcec23fe-75c7-479e-9210-85ca6781d7e5] Took 8.53 seconds to spawn the instance on the hypervisor. [ 778.545542] env[63345]: DEBUG nova.compute.manager [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: bcec23fe-75c7-479e-9210-85ca6781d7e5] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 778.546318] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b221a99-903c-4962-a415-ada558ef1175 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.645486] env[63345]: DEBUG oslo_vmware.api [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Task: {'id': task-1017047, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.661864} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.645820] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] a9b69d13-6330-4f9b-b8e1-1c0017655f9f/a9b69d13-6330-4f9b-b8e1-1c0017655f9f.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 778.646153] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] [instance: a9b69d13-6330-4f9b-b8e1-1c0017655f9f] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 778.646437] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9a641548-9c17-4c54-806f-fb297a24dfcb {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.656768] env[63345]: DEBUG oslo_vmware.api [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Waiting for the task: (returnval){ [ 778.656768] env[63345]: value = "task-1017051" [ 778.656768] env[63345]: _type = "Task" [ 778.656768] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.666874] env[63345]: DEBUG oslo_vmware.api [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Task: {'id': task-1017051, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.712942] env[63345]: DEBUG oslo_vmware.api [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52e07610-a1d9-6f6a-f265-02681c8dd2c1, 'name': SearchDatastore_Task, 'duration_secs': 0.051678} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.715797] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 778.715977] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] [instance: 02eb493e-d1a1-4461-8e3f-e493e96fe058] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 778.716241] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 778.716397] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 778.716581] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 778.717297] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f455f7df-6cf8-4621-bb51-00d9589ac562 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.726917] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 778.727134] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 778.730965] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dbe33cea-8452-4d77-b200-1f35c8319d8a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.738662] env[63345]: DEBUG oslo_vmware.api [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Waiting for the task: (returnval){ [ 778.738662] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]524d101c-1880-3220-d545-f2fadd04aab4" [ 778.738662] env[63345]: _type = "Task" [ 778.738662] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.746204] env[63345]: DEBUG oslo_vmware.api [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]524d101c-1880-3220-d545-f2fadd04aab4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.851035] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4213a75d-0908-44a8-a079-4c145c80ee0c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.859037] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b9b2189-8cdc-4657-a055-41aad6e5676b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.896229] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5651f93e-22c1-4cdd-a0e3-b15f77d9dd20 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.904200] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42fcf908-69e4-4d84-99a1-ad2a27df3c1a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.918656] env[63345]: DEBUG nova.compute.provider_tree [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 779.079756] env[63345]: INFO nova.compute.manager [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: bcec23fe-75c7-479e-9210-85ca6781d7e5] Took 40.92 seconds to build instance. [ 779.168499] env[63345]: DEBUG oslo_vmware.api [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Task: {'id': task-1017051, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.175922} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.168906] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] [instance: a9b69d13-6330-4f9b-b8e1-1c0017655f9f] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 779.169992] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a60f407b-396b-4a52-99b3-b2c11464b5dc {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.203901] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] [instance: a9b69d13-6330-4f9b-b8e1-1c0017655f9f] Reconfiguring VM instance instance-0000003b to attach disk [datastore2] a9b69d13-6330-4f9b-b8e1-1c0017655f9f/a9b69d13-6330-4f9b-b8e1-1c0017655f9f.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 779.203901] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-857c498a-46ee-4342-ba61-0fbd74dc1cfb {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.227972] env[63345]: DEBUG oslo_vmware.api [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Waiting for the task: (returnval){ [ 779.227972] env[63345]: value = "task-1017052" [ 779.227972] env[63345]: _type = "Task" [ 779.227972] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.238652] env[63345]: DEBUG oslo_vmware.api [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Task: {'id': task-1017052, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.249476] env[63345]: DEBUG oslo_vmware.api [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]524d101c-1880-3220-d545-f2fadd04aab4, 'name': SearchDatastore_Task, 'duration_secs': 0.017947} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.250373] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e48fb76c-772c-4e8c-95c0-121544519382 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.255883] env[63345]: DEBUG oslo_vmware.api [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Waiting for the task: (returnval){ [ 779.255883] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]522cb56e-18fe-a30e-40f7-f02be22c6cb8" [ 779.255883] env[63345]: _type = "Task" [ 779.255883] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.263545] env[63345]: DEBUG oslo_vmware.api [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]522cb56e-18fe-a30e-40f7-f02be22c6cb8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.366290] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-a99cbd47-599e-4848-9eb3-db28155cf89b tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 779.366638] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0346db2e-c6ef-4eee-8206-e90e69866580 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.373480] env[63345]: DEBUG oslo_vmware.api [None req-a99cbd47-599e-4848-9eb3-db28155cf89b tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] Waiting for the task: (returnval){ [ 779.373480] env[63345]: value = "task-1017053" [ 779.373480] env[63345]: _type = "Task" [ 779.373480] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.381882] env[63345]: DEBUG oslo_vmware.api [None req-a99cbd47-599e-4848-9eb3-db28155cf89b tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] Task: {'id': task-1017053, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.422414] env[63345]: DEBUG nova.scheduler.client.report [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 779.527981] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b20ae33d-c68b-47fa-966f-615a993f721c tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Acquiring lock "bcec23fe-75c7-479e-9210-85ca6781d7e5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 779.583022] env[63345]: DEBUG oslo_concurrency.lockutils [None req-92c4ac9d-fca0-471c-a97a-4fe23a5d736f tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Lock "bcec23fe-75c7-479e-9210-85ca6781d7e5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 110.368s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 779.583376] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b20ae33d-c68b-47fa-966f-615a993f721c tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Lock "bcec23fe-75c7-479e-9210-85ca6781d7e5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.056s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 779.583601] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b20ae33d-c68b-47fa-966f-615a993f721c tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Acquiring lock "bcec23fe-75c7-479e-9210-85ca6781d7e5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 779.583826] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b20ae33d-c68b-47fa-966f-615a993f721c tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Lock "bcec23fe-75c7-479e-9210-85ca6781d7e5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 779.583999] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b20ae33d-c68b-47fa-966f-615a993f721c tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Lock "bcec23fe-75c7-479e-9210-85ca6781d7e5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 
0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 779.586974] env[63345]: INFO nova.compute.manager [None req-b20ae33d-c68b-47fa-966f-615a993f721c tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: bcec23fe-75c7-479e-9210-85ca6781d7e5] Terminating instance [ 779.740650] env[63345]: DEBUG oslo_vmware.api [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Task: {'id': task-1017052, 'name': ReconfigVM_Task, 'duration_secs': 0.457744} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.740650] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] [instance: a9b69d13-6330-4f9b-b8e1-1c0017655f9f] Reconfigured VM instance instance-0000003b to attach disk [datastore2] a9b69d13-6330-4f9b-b8e1-1c0017655f9f/a9b69d13-6330-4f9b-b8e1-1c0017655f9f.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 779.740650] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2bfe2839-269d-49f9-aa8a-e689df88b1df {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.747213] env[63345]: DEBUG oslo_vmware.api [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Waiting for the task: (returnval){ [ 779.747213] env[63345]: value = "task-1017054" [ 779.747213] env[63345]: _type = "Task" [ 779.747213] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.755692] env[63345]: DEBUG oslo_vmware.api [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Task: {'id': task-1017054, 'name': Rename_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.767019] env[63345]: DEBUG oslo_vmware.api [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]522cb56e-18fe-a30e-40f7-f02be22c6cb8, 'name': SearchDatastore_Task, 'duration_secs': 0.01418} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.767019] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 779.767019] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 02eb493e-d1a1-4461-8e3f-e493e96fe058/02eb493e-d1a1-4461-8e3f-e493e96fe058.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 779.767019] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a2e5d9f2-6fe2-4d58-a83d-18a066594d9c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.770912] env[63345]: DEBUG oslo_vmware.api [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Waiting for the task: (returnval){ [ 779.770912] env[63345]: value = "task-1017055" [ 779.770912] env[63345]: _type = "Task" [ 779.770912] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.778227] env[63345]: DEBUG oslo_vmware.api [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Task: {'id': task-1017055, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.887715] env[63345]: DEBUG oslo_vmware.api [None req-a99cbd47-599e-4848-9eb3-db28155cf89b tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] Task: {'id': task-1017053, 'name': PowerOffVM_Task, 'duration_secs': 0.281896} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.889500] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-a99cbd47-599e-4848-9eb3-db28155cf89b tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 779.890273] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-a99cbd47-599e-4848-9eb3-db28155cf89b tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 779.890622] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bfcb586b-4105-49c8-bc99-09e622d5378b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.901439] env[63345]: DEBUG oslo_vmware.api [None req-a99cbd47-599e-4848-9eb3-db28155cf89b tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] Waiting for the task: (returnval){ [ 779.901439] env[63345]: value = "task-1017056" [ 779.901439] env[63345]: _type = "Task" [ 779.901439] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.916938] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-a99cbd47-599e-4848-9eb3-db28155cf89b tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] VM already powered off {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 779.917166] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-a99cbd47-599e-4848-9eb3-db28155cf89b tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Volume detach. 
Driver type: vmdk {{(pid=63345) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 779.917390] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-a99cbd47-599e-4848-9eb3-db28155cf89b tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-225937', 'volume_id': 'fcaa69f8-c32a-43e4-8f84-b58e01f1b245', 'name': 'volume-fcaa69f8-c32a-43e4-8f84-b58e01f1b245', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '3b0d115d-dad5-4881-a0e0-b98f555da533', 'attached_at': '', 'detached_at': '', 'volume_id': 'fcaa69f8-c32a-43e4-8f84-b58e01f1b245', 'serial': 'fcaa69f8-c32a-43e4-8f84-b58e01f1b245'} {{(pid=63345) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 779.918189] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc098cf3-70ff-4030-a87f-49d3db756646 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.937288] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.577s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 779.937853] env[63345]: DEBUG nova.compute.manager [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Start building networks asynchronously for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 779.944019] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.739s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 779.944019] env[63345]: INFO nova.compute.claims [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] [instance: cb712d80-be78-4c19-a891-329011521f30] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 779.946016] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fadf61e-e027-4353-bf4d-f23e97127be1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.955251] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a76ba44f-e200-49e4-9992-147431efd7ae {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.982692] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1db4f930-063e-46b7-9842-d4c83a0f6933 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.999981] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-a99cbd47-599e-4848-9eb3-db28155cf89b tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] The volume has not been displaced from its original location: [datastore2] volume-fcaa69f8-c32a-43e4-8f84-b58e01f1b245/volume-fcaa69f8-c32a-43e4-8f84-b58e01f1b245.vmdk. No consolidation needed. {{(pid=63345) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 780.005389] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-a99cbd47-599e-4848-9eb3-db28155cf89b tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Reconfiguring VM instance instance-00000031 to detach disk 2000 {{(pid=63345) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 780.006237] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-10530439-f0e7-4327-b368-45caf171f26d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.025127] env[63345]: DEBUG oslo_vmware.api [None req-a99cbd47-599e-4848-9eb3-db28155cf89b tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] Waiting for the task: (returnval){ [ 780.025127] env[63345]: value = "task-1017057" [ 780.025127] env[63345]: _type = "Task" [ 780.025127] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.034124] env[63345]: DEBUG oslo_vmware.api [None req-a99cbd47-599e-4848-9eb3-db28155cf89b tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] Task: {'id': task-1017057, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.091548] env[63345]: DEBUG nova.compute.manager [None req-b20ae33d-c68b-47fa-966f-615a993f721c tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: bcec23fe-75c7-479e-9210-85ca6781d7e5] Start destroying the instance on the hypervisor. {{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 780.091824] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-b20ae33d-c68b-47fa-966f-615a993f721c tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: bcec23fe-75c7-479e-9210-85ca6781d7e5] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 780.092780] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99bb65f2-ec8a-4e31-8324-7ff58138d0c3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.101212] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-b20ae33d-c68b-47fa-966f-615a993f721c tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: bcec23fe-75c7-479e-9210-85ca6781d7e5] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 780.101549] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e999a66a-7a65-4f95-a6cd-18714a266deb {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.108090] env[63345]: DEBUG oslo_vmware.api [None req-b20ae33d-c68b-47fa-966f-615a993f721c tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Waiting for the task: (returnval){ [ 780.108090] env[63345]: value = "task-1017058" [ 780.108090] env[63345]: _type = "Task" [ 780.108090] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.116975] env[63345]: DEBUG oslo_vmware.api [None req-b20ae33d-c68b-47fa-966f-615a993f721c tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1017058, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.259135] env[63345]: DEBUG oslo_vmware.api [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Task: {'id': task-1017054, 'name': Rename_Task, 'duration_secs': 0.234772} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.259378] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] [instance: a9b69d13-6330-4f9b-b8e1-1c0017655f9f] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 780.259619] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-15cda7f0-44ac-449e-ad15-8f3284912fac {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.267912] env[63345]: DEBUG oslo_vmware.api [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Waiting for the task: (returnval){ [ 780.267912] env[63345]: value = "task-1017059" [ 780.267912] env[63345]: _type = "Task" [ 780.267912] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.282689] env[63345]: DEBUG oslo_vmware.api [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Task: {'id': task-1017059, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.294932] env[63345]: DEBUG oslo_vmware.api [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Task: {'id': task-1017055, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.454270] env[63345]: DEBUG nova.compute.utils [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 780.455910] env[63345]: DEBUG nova.compute.manager [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Allocating IP information in the background. 
{{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 780.456135] env[63345]: DEBUG nova.network.neutron [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 780.506461] env[63345]: DEBUG nova.policy [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '068ed9798242455c937160c473990c49', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '307c1bea8f6d47ddb4d5ebac8bba25ed', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 780.537338] env[63345]: DEBUG oslo_vmware.api [None req-a99cbd47-599e-4848-9eb3-db28155cf89b tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] Task: {'id': task-1017057, 'name': ReconfigVM_Task, 'duration_secs': 0.181304} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.537740] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-a99cbd47-599e-4848-9eb3-db28155cf89b tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Reconfigured VM instance instance-00000031 to detach disk 2000 {{(pid=63345) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 780.544738] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-133914f4-285d-4565-a1a9-0c287f7d9030 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.565931] env[63345]: DEBUG oslo_vmware.api [None req-a99cbd47-599e-4848-9eb3-db28155cf89b tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] Waiting for the task: (returnval){ [ 780.565931] env[63345]: value = "task-1017060" [ 780.565931] env[63345]: _type = "Task" [ 780.565931] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.579354] env[63345]: DEBUG oslo_vmware.api [None req-a99cbd47-599e-4848-9eb3-db28155cf89b tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] Task: {'id': task-1017060, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.619646] env[63345]: DEBUG oslo_vmware.api [None req-b20ae33d-c68b-47fa-966f-615a993f721c tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1017058, 'name': PowerOffVM_Task, 'duration_secs': 0.245065} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.619646] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-b20ae33d-c68b-47fa-966f-615a993f721c tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: bcec23fe-75c7-479e-9210-85ca6781d7e5] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 780.619646] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-b20ae33d-c68b-47fa-966f-615a993f721c tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: bcec23fe-75c7-479e-9210-85ca6781d7e5] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 780.619646] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b716d1bc-ea68-4aeb-818b-64677a8bf3aa {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.737520] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-b20ae33d-c68b-47fa-966f-615a993f721c tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: bcec23fe-75c7-479e-9210-85ca6781d7e5] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 780.740488] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-b20ae33d-c68b-47fa-966f-615a993f721c tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: bcec23fe-75c7-479e-9210-85ca6781d7e5] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 780.740488] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-b20ae33d-c68b-47fa-966f-615a993f721c tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Deleting the datastore file [datastore2] bcec23fe-75c7-479e-9210-85ca6781d7e5 {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 780.740745] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c3b81ebe-dbb3-4f80-89d2-2b325255ae9e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.750157] env[63345]: DEBUG oslo_vmware.api [None req-b20ae33d-c68b-47fa-966f-615a993f721c tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Waiting for the task: (returnval){ [ 780.750157] env[63345]: value = "task-1017062" [ 780.750157] env[63345]: _type = "Task" [ 780.750157] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.762114] env[63345]: DEBUG oslo_vmware.api [None req-b20ae33d-c68b-47fa-966f-615a993f721c tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1017062, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.777404] env[63345]: DEBUG oslo_vmware.api [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Task: {'id': task-1017059, 'name': PowerOnVM_Task} progress is 76%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.791367] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Acquiring lock "df2f06af-54a6-4dbd-83ff-1e4b066acbf3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 780.791638] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Lock "df2f06af-54a6-4dbd-83ff-1e4b066acbf3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 780.792787] env[63345]: DEBUG oslo_vmware.api [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Task: {'id': task-1017055, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.699322} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.793242] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 02eb493e-d1a1-4461-8e3f-e493e96fe058/02eb493e-d1a1-4461-8e3f-e493e96fe058.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 780.793331] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] [instance: 02eb493e-d1a1-4461-8e3f-e493e96fe058] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 780.793926] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bba4f0e7-876d-421b-8247-13381f7ba098 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.800907] env[63345]: DEBUG oslo_vmware.api [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Waiting for the task: (returnval){ [ 780.800907] env[63345]: value = "task-1017063" [ 780.800907] env[63345]: _type = "Task" [ 780.800907] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.810986] env[63345]: DEBUG oslo_vmware.api [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Task: {'id': task-1017063, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.962262] env[63345]: DEBUG nova.compute.manager [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Start building block device mappings for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 780.977550] env[63345]: DEBUG nova.network.neutron [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Successfully created port: c6c991f3-51b9-4502-af97-3ca846db3c73 {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 781.079722] env[63345]: DEBUG oslo_vmware.api [None req-a99cbd47-599e-4848-9eb3-db28155cf89b tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] Task: {'id': task-1017060, 'name': ReconfigVM_Task, 'duration_secs': 0.299197} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.083355] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-a99cbd47-599e-4848-9eb3-db28155cf89b tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-225937', 'volume_id': 'fcaa69f8-c32a-43e4-8f84-b58e01f1b245', 'name': 'volume-fcaa69f8-c32a-43e4-8f84-b58e01f1b245', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '3b0d115d-dad5-4881-a0e0-b98f555da533', 'attached_at': '', 'detached_at': '', 'volume_id': 'fcaa69f8-c32a-43e4-8f84-b58e01f1b245', 'serial': 'fcaa69f8-c32a-43e4-8f84-b58e01f1b245'} {{(pid=63345) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 781.083715] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a99cbd47-599e-4848-9eb3-db28155cf89b tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 781.085057] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebe85bb4-d1ab-48f6-a257-966e71e0581c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.092010] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a99cbd47-599e-4848-9eb3-db28155cf89b tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 781.092275] env[63345]: DEBUG 
oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1790bb5e-90a8-4c9c-b8c2-38078e1fb585 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.176683] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a99cbd47-599e-4848-9eb3-db28155cf89b tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 781.176793] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a99cbd47-599e-4848-9eb3-db28155cf89b tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 781.177049] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-a99cbd47-599e-4848-9eb3-db28155cf89b tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] Deleting the datastore file [datastore2] 3b0d115d-dad5-4881-a0e0-b98f555da533 {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 781.177393] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6332ff71-f8cc-45f1-9ea4-e7b877a49842 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.184099] env[63345]: DEBUG oslo_vmware.api [None req-a99cbd47-599e-4848-9eb3-db28155cf89b tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] Waiting for the task: (returnval){ [ 781.184099] env[63345]: value = "task-1017065" [ 781.184099] env[63345]: _type = "Task" [ 781.184099] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.193686] env[63345]: DEBUG oslo_vmware.api [None req-a99cbd47-599e-4848-9eb3-db28155cf89b tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] Task: {'id': task-1017065, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.264094] env[63345]: DEBUG oslo_vmware.api [None req-b20ae33d-c68b-47fa-966f-615a993f721c tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1017062, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.282018] env[63345]: DEBUG oslo_vmware.api [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Task: {'id': task-1017059, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.294689] env[63345]: DEBUG nova.compute.manager [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: df2f06af-54a6-4dbd-83ff-1e4b066acbf3] Starting instance... 
{{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 781.316022] env[63345]: DEBUG oslo_vmware.api [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Task: {'id': task-1017063, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06178} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.319114] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] [instance: 02eb493e-d1a1-4461-8e3f-e493e96fe058] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 781.320404] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81f07bcf-e2e1-4ee9-a4f1-6254b2c6806c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.347228] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] [instance: 02eb493e-d1a1-4461-8e3f-e493e96fe058] Reconfiguring VM instance instance-0000003c to attach disk [datastore2] 02eb493e-d1a1-4461-8e3f-e493e96fe058/02eb493e-d1a1-4461-8e3f-e493e96fe058.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 781.350175] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ffa0e1ce-2cff-4332-8dc4-626da005aa6f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.371114] env[63345]: DEBUG oslo_vmware.api [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Waiting for the task: (returnval){ [ 781.371114] env[63345]: value = "task-1017066" [ 781.371114] env[63345]: _type = "Task" [ 781.371114] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.382574] env[63345]: DEBUG oslo_vmware.api [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Task: {'id': task-1017066, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.552445] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52093044-747b-4038-bcb0-9ba387c4840a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.563946] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-239ac79d-5bb1-405e-a6d5-4598d0af5be7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.594997] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2580c76d-fdf8-4b80-844b-eade66689312 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.602465] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdac0126-ef5d-4fda-81e5-2604162c984f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.619348] env[63345]: DEBUG nova.compute.provider_tree [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 781.694935] env[63345]: DEBUG oslo_vmware.api [None req-a99cbd47-599e-4848-9eb3-db28155cf89b tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] Task: {'id': task-1017065, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.309648} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.695244] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-a99cbd47-599e-4848-9eb3-db28155cf89b tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 781.695445] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a99cbd47-599e-4848-9eb3-db28155cf89b tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 781.695628] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a99cbd47-599e-4848-9eb3-db28155cf89b tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 781.767988] env[63345]: DEBUG oslo_vmware.api [None req-b20ae33d-c68b-47fa-966f-615a993f721c tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1017062, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.520494} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.768302] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-b20ae33d-c68b-47fa-966f-615a993f721c tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 781.768497] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-b20ae33d-c68b-47fa-966f-615a993f721c tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: bcec23fe-75c7-479e-9210-85ca6781d7e5] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 781.772030] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-b20ae33d-c68b-47fa-966f-615a993f721c tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: bcec23fe-75c7-479e-9210-85ca6781d7e5] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 781.772030] env[63345]: INFO nova.compute.manager [None req-b20ae33d-c68b-47fa-966f-615a993f721c tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: bcec23fe-75c7-479e-9210-85ca6781d7e5] Took 1.68 seconds to destroy the instance on the hypervisor. [ 781.772030] env[63345]: DEBUG oslo.service.loopingcall [None req-b20ae33d-c68b-47fa-966f-615a993f721c tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 781.772030] env[63345]: DEBUG nova.compute.manager [-] [instance: bcec23fe-75c7-479e-9210-85ca6781d7e5] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 781.772030] env[63345]: DEBUG nova.network.neutron [-] [instance: bcec23fe-75c7-479e-9210-85ca6781d7e5] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 781.776971] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-a99cbd47-599e-4848-9eb3-db28155cf89b tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Volume detach. Driver type: vmdk {{(pid=63345) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 781.777075] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-16678468-abfb-4275-ba53-32a8bb0690e1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.784027] env[63345]: DEBUG oslo_vmware.api [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Task: {'id': task-1017059, 'name': PowerOnVM_Task, 'duration_secs': 1.045998} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.788022] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] [instance: a9b69d13-6330-4f9b-b8e1-1c0017655f9f] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 781.788022] env[63345]: INFO nova.compute.manager [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] [instance: a9b69d13-6330-4f9b-b8e1-1c0017655f9f] Took 9.20 seconds to spawn the instance on the hypervisor. [ 781.788022] env[63345]: DEBUG nova.compute.manager [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] [instance: a9b69d13-6330-4f9b-b8e1-1c0017655f9f] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 781.788022] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bd83956-df0f-4067-8dd9-ebce8558c1e5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.792836] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcc5e7ec-dd3d-4406-8844-c36de78203c8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.833942] env[63345]: ERROR nova.compute.manager [None req-a99cbd47-599e-4848-9eb3-db28155cf89b tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Failed to detach volume fcaa69f8-c32a-43e4-8f84-b58e01f1b245 from /dev/sda: nova.exception.InstanceNotFound: Instance 3b0d115d-dad5-4881-a0e0-b98f555da533 could not be found. 
[ 781.833942] env[63345]: ERROR nova.compute.manager [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Traceback (most recent call last): [ 781.833942] env[63345]: ERROR nova.compute.manager [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] File "/opt/stack/nova/nova/compute/manager.py", line 4184, in _do_rebuild_instance [ 781.833942] env[63345]: ERROR nova.compute.manager [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] self.driver.rebuild(**kwargs) [ 781.833942] env[63345]: ERROR nova.compute.manager [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] File "/opt/stack/nova/nova/virt/driver.py", line 497, in rebuild [ 781.833942] env[63345]: ERROR nova.compute.manager [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] raise NotImplementedError() [ 781.833942] env[63345]: ERROR nova.compute.manager [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] NotImplementedError [ 781.833942] env[63345]: ERROR nova.compute.manager [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] [ 781.833942] env[63345]: ERROR nova.compute.manager [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] During handling of the above exception, another exception occurred: [ 781.833942] env[63345]: ERROR nova.compute.manager [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] [ 781.833942] env[63345]: ERROR nova.compute.manager [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Traceback (most recent call last): [ 781.833942] env[63345]: ERROR nova.compute.manager [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] File "/opt/stack/nova/nova/compute/manager.py", line 3607, in _detach_root_volume [ 781.833942] env[63345]: ERROR nova.compute.manager [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] self.driver.detach_volume(context, old_connection_info, [ 781.833942] env[63345]: ERROR nova.compute.manager [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 555, in detach_volume [ 781.833942] env[63345]: ERROR nova.compute.manager [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] return self._volumeops.detach_volume(connection_info, instance) [ 781.833942] env[63345]: ERROR nova.compute.manager [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 781.833942] env[63345]: ERROR nova.compute.manager [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] self._detach_volume_vmdk(connection_info, instance) [ 781.833942] env[63345]: ERROR nova.compute.manager [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 781.833942] env[63345]: ERROR nova.compute.manager [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 781.833942] env[63345]: ERROR nova.compute.manager [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1135, in get_vm_ref [ 781.833942] env[63345]: ERROR nova.compute.manager [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] stable_ref.fetch_moref(session) [ 781.833942] env[63345]: ERROR nova.compute.manager [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1126, in fetch_moref [ 781.833942] env[63345]: ERROR nova.compute.manager [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] raise exception.InstanceNotFound(instance_id=self._uuid) [ 781.833942] env[63345]: ERROR nova.compute.manager [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] nova.exception.InstanceNotFound: 
Instance 3b0d115d-dad5-4881-a0e0-b98f555da533 could not be found. [ 781.833942] env[63345]: ERROR nova.compute.manager [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] [ 781.837714] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 781.882857] env[63345]: DEBUG oslo_vmware.api [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Task: {'id': task-1017066, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.976867] env[63345]: DEBUG nova.compute.manager [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Start spawning the instance on the hypervisor. {{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 782.006898] env[63345]: DEBUG nova.compute.utils [None req-a99cbd47-599e-4848-9eb3-db28155cf89b tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Build of instance 3b0d115d-dad5-4881-a0e0-b98f555da533 aborted: Failed to rebuild volume backed instance. {{(pid=63345) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 782.009331] env[63345]: ERROR nova.compute.manager [None req-a99cbd47-599e-4848-9eb3-db28155cf89b tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Setting instance vm_state to ERROR: nova.exception.BuildAbortException: Build of instance 3b0d115d-dad5-4881-a0e0-b98f555da533 aborted: Failed to rebuild volume backed instance. 
[ 782.009331] env[63345]: ERROR nova.compute.manager [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Traceback (most recent call last): [ 782.009331] env[63345]: ERROR nova.compute.manager [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] File "/opt/stack/nova/nova/compute/manager.py", line 4184, in _do_rebuild_instance [ 782.009331] env[63345]: ERROR nova.compute.manager [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] self.driver.rebuild(**kwargs) [ 782.009331] env[63345]: ERROR nova.compute.manager [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] File "/opt/stack/nova/nova/virt/driver.py", line 497, in rebuild [ 782.009331] env[63345]: ERROR nova.compute.manager [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] raise NotImplementedError() [ 782.009331] env[63345]: ERROR nova.compute.manager [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] NotImplementedError [ 782.009331] env[63345]: ERROR nova.compute.manager [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] [ 782.009331] env[63345]: ERROR nova.compute.manager [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] During handling of the above exception, another exception occurred: [ 782.009331] env[63345]: ERROR nova.compute.manager [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] [ 782.009331] env[63345]: ERROR nova.compute.manager [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Traceback (most recent call last): [ 782.009331] env[63345]: ERROR nova.compute.manager [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] File "/opt/stack/nova/nova/compute/manager.py", line 3642, in _rebuild_volume_backed_instance [ 782.009331] env[63345]: ERROR nova.compute.manager [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] self._detach_root_volume(context, instance, root_bdm) [ 782.009331] env[63345]: ERROR nova.compute.manager [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] File "/opt/stack/nova/nova/compute/manager.py", line 3621, in _detach_root_volume [ 782.009331] env[63345]: ERROR nova.compute.manager [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] with excutils.save_and_reraise_exception(): [ 782.009331] env[63345]: ERROR nova.compute.manager [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 782.009331] env[63345]: ERROR nova.compute.manager [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] self.force_reraise() [ 782.009331] env[63345]: ERROR nova.compute.manager [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 782.009331] env[63345]: ERROR nova.compute.manager [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] raise self.value [ 782.009331] env[63345]: ERROR nova.compute.manager [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] File "/opt/stack/nova/nova/compute/manager.py", line 3607, in _detach_root_volume [ 782.009331] env[63345]: ERROR nova.compute.manager [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] self.driver.detach_volume(context, old_connection_info, [ 782.009331] env[63345]: ERROR nova.compute.manager [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 555, in detach_volume [ 782.009331] env[63345]: ERROR nova.compute.manager [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] return self._volumeops.detach_volume(connection_info, instance) [ 782.009331] env[63345]: ERROR nova.compute.manager [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] File 
"/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 782.009331] env[63345]: ERROR nova.compute.manager [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] self._detach_volume_vmdk(connection_info, instance) [ 782.009331] env[63345]: ERROR nova.compute.manager [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 782.009331] env[63345]: ERROR nova.compute.manager [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 782.009331] env[63345]: ERROR nova.compute.manager [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1135, in get_vm_ref [ 782.009331] env[63345]: ERROR nova.compute.manager [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] stable_ref.fetch_moref(session) [ 782.009331] env[63345]: ERROR nova.compute.manager [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1126, in fetch_moref [ 782.009331] env[63345]: ERROR nova.compute.manager [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] raise exception.InstanceNotFound(instance_id=self._uuid) [ 782.009331] env[63345]: ERROR nova.compute.manager [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] nova.exception.InstanceNotFound: Instance 3b0d115d-dad5-4881-a0e0-b98f555da533 could not be found. [ 782.009331] env[63345]: ERROR nova.compute.manager [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] [ 782.009331] env[63345]: ERROR nova.compute.manager [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] During handling of the above exception, another exception occurred: [ 782.009331] env[63345]: ERROR nova.compute.manager [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] [ 782.009331] env[63345]: ERROR nova.compute.manager [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Traceback (most recent call last): [ 782.009331] env[63345]: ERROR nova.compute.manager [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] File "/opt/stack/nova/nova/compute/manager.py", line 11194, in _error_out_instance_on_exception [ 782.009331] env[63345]: ERROR nova.compute.manager [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] yield [ 782.009331] env[63345]: ERROR nova.compute.manager [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] File "/opt/stack/nova/nova/compute/manager.py", line 3910, in rebuild_instance [ 782.009331] env[63345]: ERROR nova.compute.manager [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] self._do_rebuild_instance_with_claim( [ 782.010402] env[63345]: ERROR nova.compute.manager [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] File "/opt/stack/nova/nova/compute/manager.py", line 3996, in _do_rebuild_instance_with_claim [ 782.010402] env[63345]: ERROR nova.compute.manager [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] self._do_rebuild_instance( [ 782.010402] env[63345]: ERROR nova.compute.manager [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] File "/opt/stack/nova/nova/compute/manager.py", line 4188, in _do_rebuild_instance [ 782.010402] env[63345]: ERROR nova.compute.manager [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] self._rebuild_default_impl(**kwargs) [ 782.010402] env[63345]: ERROR nova.compute.manager [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] File "/opt/stack/nova/nova/compute/manager.py", line 3765, in _rebuild_default_impl [ 782.010402] env[63345]: ERROR nova.compute.manager [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] self._rebuild_volume_backed_instance( [ 
782.010402] env[63345]: ERROR nova.compute.manager [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] File "/opt/stack/nova/nova/compute/manager.py", line 3657, in _rebuild_volume_backed_instance [ 782.010402] env[63345]: ERROR nova.compute.manager [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] raise exception.BuildAbortException( [ 782.010402] env[63345]: ERROR nova.compute.manager [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] nova.exception.BuildAbortException: Build of instance 3b0d115d-dad5-4881-a0e0-b98f555da533 aborted: Failed to rebuild volume backed instance. [ 782.010402] env[63345]: ERROR nova.compute.manager [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] [ 782.014118] env[63345]: DEBUG nova.virt.hardware [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 782.014369] env[63345]: DEBUG nova.virt.hardware [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 782.014693] env[63345]: DEBUG nova.virt.hardware [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 782.014775] env[63345]: DEBUG nova.virt.hardware [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 782.014867] env[63345]: DEBUG nova.virt.hardware [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 782.015065] env[63345]: DEBUG nova.virt.hardware [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 782.015309] env[63345]: DEBUG nova.virt.hardware [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 
tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 782.015479] env[63345]: DEBUG nova.virt.hardware [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 782.015648] env[63345]: DEBUG nova.virt.hardware [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 782.015806] env[63345]: DEBUG nova.virt.hardware [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 782.015989] env[63345]: DEBUG nova.virt.hardware [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 782.017020] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db8a6c98-803e-4d81-81cc-193c597701c3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.025930] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95a9105c-012e-4a5a-bbfb-9aa19757df51 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.121657] env[63345]: DEBUG nova.scheduler.client.report [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 782.324695] env[63345]: INFO nova.compute.manager [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] [instance: a9b69d13-6330-4f9b-b8e1-1c0017655f9f] Took 42.91 seconds to build instance. 
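The tracebacks above trace a single failure chain for the volume-backed rebuild of instance 3b0d115d-dad5-4881-a0e0-b98f555da533: the vmwareapi driver does not implement rebuild(), so the base driver raises NotImplementedError and the compute manager falls back to the generic rebuild path; that path first detaches the root volume, but the VM has already been unregistered and its datastore files deleted, so the UUID-to-moref lookup raises InstanceNotFound, which is re-raised and finally surfaces as BuildAbortException, leaving the instance in ERROR. The following is a minimal, self-contained Python sketch of that control flow only; every class and function name in it is an illustrative stand-in, not the actual Nova implementation.

# Illustrative sketch of the rebuild failure chain seen in the traceback above.
# All names are hypothetical stand-ins, not Nova's real classes or methods.

class InstanceNotFound(Exception):
    """Raised when the backing VM cannot be located by UUID (moref lookup fails)."""

class BuildAbortException(Exception):
    """Raised when the rebuild cannot proceed and the instance is put in ERROR."""

class FakeVMwareDriver:
    def rebuild(self, **kwargs):
        # The driver offers no rebuild(); the base implementation raises,
        # which triggers the generic fallback path.
        raise NotImplementedError()

    def detach_volume(self, connection_info, instance):
        # The VM was already unregistered and its files deleted, so the
        # lookup of the managed object reference by UUID fails.
        raise InstanceNotFound(f"Instance {instance} could not be found.")

def rebuild_volume_backed_instance(driver, instance, connection_info):
    """Fallback path: detach the root volume before rebuilding from the volume."""
    try:
        driver.rebuild(instance=instance)
    except NotImplementedError:
        try:
            driver.detach_volume(connection_info, instance)
        except InstanceNotFound as exc:
            # The detach failure is logged, then the rebuild is aborted and
            # the instance's vm_state is set to ERROR by the caller.
            raise BuildAbortException(
                f"Build of instance {instance} aborted: "
                "Failed to rebuild volume backed instance.") from exc

if __name__ == "__main__":
    try:
        rebuild_volume_backed_instance(
            FakeVMwareDriver(),
            instance="3b0d115d-dad5-4881-a0e0-b98f555da533",
            connection_info={"driver_volume_type": "vmdk"})
    except BuildAbortException as exc:
        print(exc)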
[ 782.382437] env[63345]: DEBUG oslo_vmware.api [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Task: {'id': task-1017066, 'name': ReconfigVM_Task, 'duration_secs': 0.757112} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.382437] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] [instance: 02eb493e-d1a1-4461-8e3f-e493e96fe058] Reconfigured VM instance instance-0000003c to attach disk [datastore2] 02eb493e-d1a1-4461-8e3f-e493e96fe058/02eb493e-d1a1-4461-8e3f-e493e96fe058.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 782.383102] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0b6165f9-743e-4fb4-9f4c-73b3146b62c9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.389846] env[63345]: DEBUG oslo_vmware.api [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Waiting for the task: (returnval){ [ 782.389846] env[63345]: value = "task-1017067" [ 782.389846] env[63345]: _type = "Task" [ 782.389846] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.400862] env[63345]: DEBUG oslo_vmware.api [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Task: {'id': task-1017067, 'name': Rename_Task} progress is 5%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.406311] env[63345]: DEBUG nova.compute.manager [req-92a472e6-f2ac-4d0c-a569-d3862945771f req-1458c413-15bf-48ad-a2a8-cc58e9e34221 service nova] [instance: bcec23fe-75c7-479e-9210-85ca6781d7e5] Received event network-vif-deleted-34e64073-eaa9-42e5-b620-b216a15b02d1 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 782.406523] env[63345]: INFO nova.compute.manager [req-92a472e6-f2ac-4d0c-a569-d3862945771f req-1458c413-15bf-48ad-a2a8-cc58e9e34221 service nova] [instance: bcec23fe-75c7-479e-9210-85ca6781d7e5] Neutron deleted interface 34e64073-eaa9-42e5-b620-b216a15b02d1; detaching it from the instance and deleting it from the info cache [ 782.406706] env[63345]: DEBUG nova.network.neutron [req-92a472e6-f2ac-4d0c-a569-d3862945771f req-1458c413-15bf-48ad-a2a8-cc58e9e34221 service nova] [instance: bcec23fe-75c7-479e-9210-85ca6781d7e5] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 782.628390] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.688s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 782.628941] env[63345]: DEBUG nova.compute.manager [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] [instance: cb712d80-be78-4c19-a891-329011521f30] Start building networks asynchronously for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 782.633247] env[63345]: DEBUG nova.network.neutron [-] [instance: bcec23fe-75c7-479e-9210-85ca6781d7e5] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 782.635031] env[63345]: DEBUG oslo_concurrency.lockutils [None req-e6a3c7b6-fdbf-4dc5-8bf4-9de718283653 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.373s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 782.635031] env[63345]: DEBUG nova.objects.instance [None req-e6a3c7b6-fdbf-4dc5-8bf4-9de718283653 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Lazy-loading 'resources' on Instance uuid e6bc8cb9-2f1a-49cb-974d-ea9a211126ee {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 782.740118] env[63345]: DEBUG nova.network.neutron [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Successfully updated port: c6c991f3-51b9-4502-af97-3ca846db3c73 {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 782.826746] env[63345]: DEBUG oslo_concurrency.lockutils [None req-850c47e9-4e83-460e-bb4e-9175ff04458b tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Lock "a9b69d13-6330-4f9b-b8e1-1c0017655f9f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 80.895s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 782.899342] env[63345]: DEBUG oslo_vmware.api [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Task: {'id': task-1017067, 'name': Rename_Task, 'duration_secs': 0.141271} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.899668] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] [instance: 02eb493e-d1a1-4461-8e3f-e493e96fe058] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 782.899924] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ab45bd4a-485d-4f4a-8278-ce31952361e8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.906525] env[63345]: DEBUG oslo_vmware.api [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Waiting for the task: (returnval){ [ 782.906525] env[63345]: value = "task-1017068" [ 782.906525] env[63345]: _type = "Task" [ 782.906525] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.910815] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-720eb9e9-12c9-4abd-911e-723711f0841c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.917824] env[63345]: DEBUG oslo_vmware.api [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Task: {'id': task-1017068, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.921829] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af2a4fae-83a5-43d2-b807-06879389e021 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.959241] env[63345]: DEBUG nova.compute.manager [req-92a472e6-f2ac-4d0c-a569-d3862945771f req-1458c413-15bf-48ad-a2a8-cc58e9e34221 service nova] [instance: bcec23fe-75c7-479e-9210-85ca6781d7e5] Detach interface failed, port_id=34e64073-eaa9-42e5-b620-b216a15b02d1, reason: Instance bcec23fe-75c7-479e-9210-85ca6781d7e5 could not be found. {{(pid=63345) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 783.024632] env[63345]: DEBUG oslo_concurrency.lockutils [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Acquiring lock "37f269fe-0266-4c03-9641-e6f43072657a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 783.024894] env[63345]: DEBUG oslo_concurrency.lockutils [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Lock "37f269fe-0266-4c03-9641-e6f43072657a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 783.137409] env[63345]: DEBUG nova.compute.utils [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 783.138893] env[63345]: INFO nova.compute.manager [-] [instance: bcec23fe-75c7-479e-9210-85ca6781d7e5] Took 1.37 seconds to deallocate network for instance. [ 783.144686] env[63345]: DEBUG nova.compute.manager [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] [instance: cb712d80-be78-4c19-a891-329011521f30] Allocating IP information in the background. 
{{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 783.144686] env[63345]: DEBUG nova.network.neutron [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] [instance: cb712d80-be78-4c19-a891-329011521f30] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 783.205685] env[63345]: DEBUG nova.policy [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '95beadfb03494623a82f12bdc4e61214', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5c17e4fcceb7439183e78786579ac4f0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 783.252896] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Acquiring lock "refresh_cache-00c58889-75f7-4a4b-a5a3-a45723c1f495" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 783.253110] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Acquired lock "refresh_cache-00c58889-75f7-4a4b-a5a3-a45723c1f495" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 783.254338] env[63345]: DEBUG nova.network.neutron [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 783.416540] env[63345]: DEBUG oslo_vmware.api [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Task: {'id': task-1017068, 'name': PowerOnVM_Task, 'duration_secs': 0.472171} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.419802] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] [instance: 02eb493e-d1a1-4461-8e3f-e493e96fe058] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 783.420051] env[63345]: INFO nova.compute.manager [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] [instance: 02eb493e-d1a1-4461-8e3f-e493e96fe058] Took 8.24 seconds to spawn the instance on the hypervisor. 
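The recurring "Waiting for the task ... progress is N% ... completed successfully" lines around the ReconfigVM_Task, Rename_Task and PowerOnVM_Task entries come from the task-polling loop in oslo_vmware.api (wait_for_task at api.py:397, _poll_task at api.py:434, as quoted in the log). Below is a hedged, self-contained sketch of that polling pattern only; the FakeTask stub and the wait_for_task helper here are illustrative assumptions, not the oslo.vmware implementation, which drives the poll through a session and a periodic looping call.

# Illustrative polling sketch; FakeTask and wait_for_task are stand-ins.
import time

class FakeTask:
    """Stand-in for a vCenter task handle (e.g. task-1017068, PowerOnVM_Task)."""
    def __init__(self, name, steps=3):
        self.name = name
        self._progress = 0
        self._steps = steps

    def poll(self):
        # Each poll returns the current state and progress, loosely mirroring
        # the TaskInfo that the real loop reads back from vCenter.
        self._progress = min(100, self._progress + 100 // self._steps)
        state = "success" if self._progress >= 100 else "running"
        return state, self._progress

def wait_for_task(task, interval=0.5, timeout=60):
    """Poll a task until it completes, echoing the DEBUG lines seen in the log."""
    start = time.monotonic()
    while True:
        state, progress = task.poll()
        print(f"Task: {task.name} progress is {progress}%.")
        if state == "success":
            print(f"Task: {task.name} completed successfully.")
            return
        if state == "error":
            raise RuntimeError(f"Task {task.name} failed")
        if time.monotonic() - start > timeout:
            raise TimeoutError(f"Task {task.name} did not finish in {timeout}s")
        time.sleep(interval)

if __name__ == "__main__":
    wait_for_task(FakeTask("PowerOnVM_Task"), interval=0.1)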
[ 783.420228] env[63345]: DEBUG nova.compute.manager [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] [instance: 02eb493e-d1a1-4461-8e3f-e493e96fe058] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 783.421263] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53eaa081-e611-4b0c-a88a-fb6f76eae41e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.532033] env[63345]: DEBUG nova.compute.manager [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] [instance: 37f269fe-0266-4c03-9641-e6f43072657a] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 783.637079] env[63345]: DEBUG nova.network.neutron [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] [instance: cb712d80-be78-4c19-a891-329011521f30] Successfully created port: 253ec25c-f8ef-41b2-a789-844509636178 {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 783.647348] env[63345]: DEBUG nova.compute.manager [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] [instance: cb712d80-be78-4c19-a891-329011521f30] Start building block device mappings for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 783.652129] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b20ae33d-c68b-47fa-966f-615a993f721c tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 783.662411] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5370fea1-bb7b-464f-9c41-07989784ef87 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.675595] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b26ee43b-af54-40a6-afdc-57d946e125aa {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.708995] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e0fd01e-cad3-4ad2-a702-27c805ab49db {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.719522] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f47d37a-0b7c-42ae-9dc4-dadfa5be3bb0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.737119] env[63345]: DEBUG nova.compute.provider_tree [None req-e6a3c7b6-fdbf-4dc5-8bf4-9de718283653 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 783.805715] env[63345]: DEBUG nova.network.neutron [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 783.948140] env[63345]: INFO nova.compute.manager [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] [instance: 02eb493e-d1a1-4461-8e3f-e493e96fe058] Took 41.34 seconds to build instance. [ 784.014835] env[63345]: DEBUG nova.network.neutron [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Updating instance_info_cache with network_info: [{"id": "c6c991f3-51b9-4502-af97-3ca846db3c73", "address": "fa:16:3e:87:40:da", "network": {"id": "e38fba0e-9c96-4a09-b0f0-08546e52eba6", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-944344960-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "307c1bea8f6d47ddb4d5ebac8bba25ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "721e64ee-fc02-4eb5-9c8c-ea55647a1b92", "external-id": "nsx-vlan-transportzone-621", "segmentation_id": 621, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc6c991f3-51", "ovs_interfaceid": "c6c991f3-51b9-4502-af97-3ca846db3c73", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 784.042069] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a99cbd47-599e-4848-9eb3-db28155cf89b tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 784.054205] env[63345]: DEBUG oslo_concurrency.lockutils [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 784.209102] env[63345]: DEBUG oslo_concurrency.lockutils [None req-43871fda-22ad-44d3-8c3d-7d22c491d121 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquiring lock "interface-85fb1ecd-4ca3-401d-a87a-131f0b275506-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=63345) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 784.209537] env[63345]: DEBUG oslo_concurrency.lockutils [None req-43871fda-22ad-44d3-8c3d-7d22c491d121 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Lock "interface-85fb1ecd-4ca3-401d-a87a-131f0b275506-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 784.210058] env[63345]: DEBUG nova.objects.instance [None req-43871fda-22ad-44d3-8c3d-7d22c491d121 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Lazy-loading 'flavor' on Instance uuid 85fb1ecd-4ca3-401d-a87a-131f0b275506 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 784.236314] env[63345]: DEBUG oslo_concurrency.lockutils [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquiring lock "691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 784.236640] env[63345]: DEBUG oslo_concurrency.lockutils [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lock "691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 784.239708] env[63345]: DEBUG nova.scheduler.client.report [None req-e6a3c7b6-fdbf-4dc5-8bf4-9de718283653 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 784.450740] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0d5b8713-db86-415e-863c-7de55b824483 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Lock "02eb493e-d1a1-4461-8e3f-e493e96fe058" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 61.310s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 784.516640] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Releasing lock "refresh_cache-00c58889-75f7-4a4b-a5a3-a45723c1f495" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 784.517120] env[63345]: DEBUG nova.compute.manager [None 
req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Instance network_info: |[{"id": "c6c991f3-51b9-4502-af97-3ca846db3c73", "address": "fa:16:3e:87:40:da", "network": {"id": "e38fba0e-9c96-4a09-b0f0-08546e52eba6", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-944344960-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "307c1bea8f6d47ddb4d5ebac8bba25ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "721e64ee-fc02-4eb5-9c8c-ea55647a1b92", "external-id": "nsx-vlan-transportzone-621", "segmentation_id": 621, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc6c991f3-51", "ovs_interfaceid": "c6c991f3-51b9-4502-af97-3ca846db3c73", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 784.517556] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:87:40:da', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '721e64ee-fc02-4eb5-9c8c-ea55647a1b92', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c6c991f3-51b9-4502-af97-3ca846db3c73', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 784.525150] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Creating folder: Project (307c1bea8f6d47ddb4d5ebac8bba25ed). Parent ref: group-v225918. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 784.525779] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b8c3c684-dbd7-4be9-8ce6-ed63f661fa0f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.537877] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Created folder: Project (307c1bea8f6d47ddb4d5ebac8bba25ed) in parent group-v225918. [ 784.538109] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Creating folder: Instances. Parent ref: group-v226035. 
{{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 784.538363] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-199d8b2f-1f56-4f8e-a39d-ee6c19eb7092 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.547800] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Created folder: Instances in parent group-v226035. [ 784.548061] env[63345]: DEBUG oslo.service.loopingcall [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 784.548274] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 784.548495] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dbe49666-7788-4cd0-a5ce-e7be70fa3e37 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.567755] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 784.567755] env[63345]: value = "task-1017071" [ 784.567755] env[63345]: _type = "Task" [ 784.567755] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.575029] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017071, 'name': CreateVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.661044] env[63345]: DEBUG nova.compute.manager [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] [instance: cb712d80-be78-4c19-a891-329011521f30] Start spawning the instance on the hypervisor. 
{{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 784.688192] env[63345]: DEBUG nova.virt.hardware [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 784.688474] env[63345]: DEBUG nova.virt.hardware [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 784.688642] env[63345]: DEBUG nova.virt.hardware [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 784.688834] env[63345]: DEBUG nova.virt.hardware [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 784.688985] env[63345]: DEBUG nova.virt.hardware [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 784.689151] env[63345]: DEBUG nova.virt.hardware [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 784.689377] env[63345]: DEBUG nova.virt.hardware [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 784.689544] env[63345]: DEBUG nova.virt.hardware [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 784.689715] env[63345]: DEBUG nova.virt.hardware [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 
tempest-ServersTestJSON-1425074420-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 784.689885] env[63345]: DEBUG nova.virt.hardware [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 784.690240] env[63345]: DEBUG nova.virt.hardware [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 784.691196] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78275e53-bbee-4468-89ec-da5aefc5dfe2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.699346] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfc6b3fa-e592-4073-ac85-2ce466082130 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.744886] env[63345]: DEBUG oslo_concurrency.lockutils [None req-e6a3c7b6-fdbf-4dc5-8bf4-9de718283653 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.110s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 784.747090] env[63345]: DEBUG oslo_concurrency.lockutils [None req-aa91f299-8150-490c-b32f-3ea639ffcfb2 tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.970s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 784.747327] env[63345]: DEBUG nova.objects.instance [None req-aa91f299-8150-490c-b32f-3ea639ffcfb2 tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Lazy-loading 'resources' on Instance uuid d3e99100-f13f-4019-9b5a-adaa65dacc5f {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 784.767458] env[63345]: INFO nova.scheduler.client.report [None req-e6a3c7b6-fdbf-4dc5-8bf4-9de718283653 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Deleted allocations for instance e6bc8cb9-2f1a-49cb-974d-ea9a211126ee [ 784.785541] env[63345]: DEBUG nova.objects.instance [None req-43871fda-22ad-44d3-8c3d-7d22c491d121 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Lazy-loading 'pci_requests' on Instance uuid 85fb1ecd-4ca3-401d-a87a-131f0b275506 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 784.803376] env[63345]: DEBUG oslo_concurrency.lockutils [None req-27434f8c-bdad-4693-b94d-700a2c631c2f tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] Acquiring lock "3b0d115d-dad5-4881-a0e0-b98f555da533" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 784.803659] env[63345]: DEBUG oslo_concurrency.lockutils [None req-27434f8c-bdad-4693-b94d-700a2c631c2f tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] Lock "3b0d115d-dad5-4881-a0e0-b98f555da533" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 784.803844] env[63345]: DEBUG oslo_concurrency.lockutils [None req-27434f8c-bdad-4693-b94d-700a2c631c2f tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] Acquiring lock "3b0d115d-dad5-4881-a0e0-b98f555da533-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 784.804139] env[63345]: DEBUG oslo_concurrency.lockutils [None req-27434f8c-bdad-4693-b94d-700a2c631c2f tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] Lock "3b0d115d-dad5-4881-a0e0-b98f555da533-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 784.804423] env[63345]: DEBUG oslo_concurrency.lockutils [None req-27434f8c-bdad-4693-b94d-700a2c631c2f tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] Lock "3b0d115d-dad5-4881-a0e0-b98f555da533-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 784.806654] env[63345]: INFO nova.compute.manager [None req-27434f8c-bdad-4693-b94d-700a2c631c2f tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Terminating instance [ 784.953815] env[63345]: DEBUG nova.compute.manager [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Starting instance... 
{{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 785.040017] env[63345]: DEBUG nova.compute.manager [req-673280b6-239f-468e-b972-56c48aab5975 req-3dd2d721-4467-44d5-a0c2-4a5e40c70720 service nova] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Received event network-vif-plugged-c6c991f3-51b9-4502-af97-3ca846db3c73 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 785.040017] env[63345]: DEBUG oslo_concurrency.lockutils [req-673280b6-239f-468e-b972-56c48aab5975 req-3dd2d721-4467-44d5-a0c2-4a5e40c70720 service nova] Acquiring lock "00c58889-75f7-4a4b-a5a3-a45723c1f495-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 785.040017] env[63345]: DEBUG oslo_concurrency.lockutils [req-673280b6-239f-468e-b972-56c48aab5975 req-3dd2d721-4467-44d5-a0c2-4a5e40c70720 service nova] Lock "00c58889-75f7-4a4b-a5a3-a45723c1f495-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 785.040017] env[63345]: DEBUG oslo_concurrency.lockutils [req-673280b6-239f-468e-b972-56c48aab5975 req-3dd2d721-4467-44d5-a0c2-4a5e40c70720 service nova] Lock "00c58889-75f7-4a4b-a5a3-a45723c1f495-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 785.040017] env[63345]: DEBUG nova.compute.manager [req-673280b6-239f-468e-b972-56c48aab5975 req-3dd2d721-4467-44d5-a0c2-4a5e40c70720 service nova] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] No waiting events found dispatching network-vif-plugged-c6c991f3-51b9-4502-af97-3ca846db3c73 {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 785.040017] env[63345]: WARNING nova.compute.manager [req-673280b6-239f-468e-b972-56c48aab5975 req-3dd2d721-4467-44d5-a0c2-4a5e40c70720 service nova] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Received unexpected event network-vif-plugged-c6c991f3-51b9-4502-af97-3ca846db3c73 for instance with vm_state building and task_state spawning. [ 785.040017] env[63345]: DEBUG nova.compute.manager [req-673280b6-239f-468e-b972-56c48aab5975 req-3dd2d721-4467-44d5-a0c2-4a5e40c70720 service nova] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Received event network-changed-c6c991f3-51b9-4502-af97-3ca846db3c73 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 785.040017] env[63345]: DEBUG nova.compute.manager [req-673280b6-239f-468e-b972-56c48aab5975 req-3dd2d721-4467-44d5-a0c2-4a5e40c70720 service nova] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Refreshing instance network info cache due to event network-changed-c6c991f3-51b9-4502-af97-3ca846db3c73. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 785.040017] env[63345]: DEBUG oslo_concurrency.lockutils [req-673280b6-239f-468e-b972-56c48aab5975 req-3dd2d721-4467-44d5-a0c2-4a5e40c70720 service nova] Acquiring lock "refresh_cache-00c58889-75f7-4a4b-a5a3-a45723c1f495" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 785.040749] env[63345]: DEBUG oslo_concurrency.lockutils [req-673280b6-239f-468e-b972-56c48aab5975 req-3dd2d721-4467-44d5-a0c2-4a5e40c70720 service nova] Acquired lock "refresh_cache-00c58889-75f7-4a4b-a5a3-a45723c1f495" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 785.040749] env[63345]: DEBUG nova.network.neutron [req-673280b6-239f-468e-b972-56c48aab5975 req-3dd2d721-4467-44d5-a0c2-4a5e40c70720 service nova] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Refreshing network info cache for port c6c991f3-51b9-4502-af97-3ca846db3c73 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 785.077973] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017071, 'name': CreateVM_Task} progress is 25%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.250794] env[63345]: DEBUG nova.network.neutron [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] [instance: cb712d80-be78-4c19-a891-329011521f30] Successfully updated port: 253ec25c-f8ef-41b2-a789-844509636178 {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 785.276603] env[63345]: DEBUG oslo_concurrency.lockutils [None req-e6a3c7b6-fdbf-4dc5-8bf4-9de718283653 tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Lock "e6bc8cb9-2f1a-49cb-974d-ea9a211126ee" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.517s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 785.290902] env[63345]: DEBUG nova.objects.base [None req-43871fda-22ad-44d3-8c3d-7d22c491d121 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Object Instance<85fb1ecd-4ca3-401d-a87a-131f0b275506> lazy-loaded attributes: flavor,pci_requests {{(pid=63345) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 785.290902] env[63345]: DEBUG nova.network.neutron [None req-43871fda-22ad-44d3-8c3d-7d22c491d121 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 785.310810] env[63345]: DEBUG nova.compute.manager [None req-27434f8c-bdad-4693-b94d-700a2c631c2f tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Start destroying the instance on the hypervisor. 
{{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 785.311264] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-01879bcc-099c-4473-8805-cb29b327015f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.323991] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51815ad1-dc7c-4442-8330-fb8907c4c98f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.342650] env[63345]: DEBUG nova.policy [None req-43871fda-22ad-44d3-8c3d-7d22c491d121 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e36fd04030444217acadbbf4e4fe9be0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '33c28bfca4da460e8ca96dc7519204c8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 785.366535] env[63345]: WARNING nova.virt.vmwareapi.driver [None req-27434f8c-bdad-4693-b94d-700a2c631c2f tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Instance does not exists. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance 3b0d115d-dad5-4881-a0e0-b98f555da533 could not be found. [ 785.366766] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-27434f8c-bdad-4693-b94d-700a2c631c2f tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 785.369278] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-64aba374-769d-4cb1-8d33-fd161d24cae4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.378876] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78dc2ffd-73d4-41a7-af53-5deac7c31e95 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.417918] env[63345]: WARNING nova.virt.vmwareapi.vmops [None req-27434f8c-bdad-4693-b94d-700a2c631c2f tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3b0d115d-dad5-4881-a0e0-b98f555da533 could not be found. 
[ 785.418337] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-27434f8c-bdad-4693-b94d-700a2c631c2f tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 785.418554] env[63345]: INFO nova.compute.manager [None req-27434f8c-bdad-4693-b94d-700a2c631c2f tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Took 0.11 seconds to destroy the instance on the hypervisor. [ 785.418856] env[63345]: DEBUG oslo.service.loopingcall [None req-27434f8c-bdad-4693-b94d-700a2c631c2f tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 785.421402] env[63345]: DEBUG nova.compute.manager [-] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 785.421514] env[63345]: DEBUG nova.network.neutron [-] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 785.474312] env[63345]: DEBUG oslo_concurrency.lockutils [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 785.579665] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017071, 'name': CreateVM_Task, 'duration_secs': 0.742976} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.582023] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 785.583280] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 785.583280] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 785.583492] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 785.583742] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e19c01b-cdf3-46c3-afa0-cc229caedd88 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.590097] env[63345]: DEBUG oslo_vmware.api [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Waiting for the task: (returnval){ [ 785.590097] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]521cc180-099e-f24b-30a2-cc07b018db34" [ 785.590097] env[63345]: _type = "Task" [ 785.590097] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.598404] env[63345]: DEBUG oslo_vmware.api [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]521cc180-099e-f24b-30a2-cc07b018db34, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.716953] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1ee1aff-d3bc-4c85-822d-d15eda4e1099 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.725221] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9c6e97c-b477-4fde-9f24-b3ea2351ad9d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.763386] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Acquiring lock "refresh_cache-cb712d80-be78-4c19-a891-329011521f30" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 785.763386] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Acquired lock "refresh_cache-cb712d80-be78-4c19-a891-329011521f30" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 785.763386] env[63345]: DEBUG nova.network.neutron [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] [instance: cb712d80-be78-4c19-a891-329011521f30] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 785.765959] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1018f745-6501-4696-a118-c9ba185e9fa8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.773300] env[63345]: DEBUG nova.network.neutron [None req-43871fda-22ad-44d3-8c3d-7d22c491d121 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] Successfully created port: 27e88e33-527e-43d8-af2c-7bb4bdac51e2 {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 785.779142] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53307f04-e79a-43dd-bfd5-3917001ed9a0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.793883] env[63345]: DEBUG nova.compute.provider_tree [None req-aa91f299-8150-490c-b32f-3ea639ffcfb2 tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 785.812138] env[63345]: DEBUG nova.network.neutron [req-673280b6-239f-468e-b972-56c48aab5975 req-3dd2d721-4467-44d5-a0c2-4a5e40c70720 service nova] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Updated VIF entry in instance network info cache for port c6c991f3-51b9-4502-af97-3ca846db3c73. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 785.812491] env[63345]: DEBUG nova.network.neutron [req-673280b6-239f-468e-b972-56c48aab5975 req-3dd2d721-4467-44d5-a0c2-4a5e40c70720 service nova] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Updating instance_info_cache with network_info: [{"id": "c6c991f3-51b9-4502-af97-3ca846db3c73", "address": "fa:16:3e:87:40:da", "network": {"id": "e38fba0e-9c96-4a09-b0f0-08546e52eba6", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-944344960-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "307c1bea8f6d47ddb4d5ebac8bba25ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "721e64ee-fc02-4eb5-9c8c-ea55647a1b92", "external-id": "nsx-vlan-transportzone-621", "segmentation_id": 621, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc6c991f3-51", "ovs_interfaceid": "c6c991f3-51b9-4502-af97-3ca846db3c73", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 786.108881] env[63345]: DEBUG oslo_vmware.api [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]521cc180-099e-f24b-30a2-cc07b018db34, 'name': SearchDatastore_Task, 'duration_secs': 0.013021} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.109319] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 786.109575] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 786.110320] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 786.110618] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 786.110928] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 786.111417] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-acff34af-182a-48f2-99b1-5fdd77caadce {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.121269] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 786.121730] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 786.122988] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7ce7b2b2-94c5-4ec8-90fd-03235fc2906f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.128377] env[63345]: DEBUG oslo_vmware.api [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Waiting for the task: (returnval){ [ 786.128377] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]5200750a-c0e7-0cf7-6f5a-3b2d8d1eed40" [ 786.128377] env[63345]: _type = "Task" [ 786.128377] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.137246] env[63345]: DEBUG oslo_vmware.api [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5200750a-c0e7-0cf7-6f5a-3b2d8d1eed40, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.296583] env[63345]: DEBUG nova.scheduler.client.report [None req-aa91f299-8150-490c-b32f-3ea639ffcfb2 tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 786.311457] env[63345]: DEBUG nova.network.neutron [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] [instance: cb712d80-be78-4c19-a891-329011521f30] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 786.314812] env[63345]: DEBUG oslo_concurrency.lockutils [req-673280b6-239f-468e-b972-56c48aab5975 req-3dd2d721-4467-44d5-a0c2-4a5e40c70720 service nova] Releasing lock "refresh_cache-00c58889-75f7-4a4b-a5a3-a45723c1f495" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 786.460704] env[63345]: DEBUG nova.network.neutron [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] [instance: cb712d80-be78-4c19-a891-329011521f30] Updating instance_info_cache with network_info: [{"id": "253ec25c-f8ef-41b2-a789-844509636178", "address": "fa:16:3e:74:ae:d2", "network": {"id": "e9d84e91-5123-45ff-bfe5-daed25b5cc76", "bridge": "br-int", "label": "tempest-ServersTestJSON-1455763601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5c17e4fcceb7439183e78786579ac4f0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "257e5ea7-8b80-4301-9900-a754f1fe2031", "external-id": "nsx-vlan-transportzone-682", "segmentation_id": 682, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap253ec25c-f8", "ovs_interfaceid": "253ec25c-f8ef-41b2-a789-844509636178", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 786.638490] env[63345]: DEBUG oslo_vmware.api [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5200750a-c0e7-0cf7-6f5a-3b2d8d1eed40, 'name': SearchDatastore_Task, 'duration_secs': 0.010666} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.639356] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-11b1ddb5-cd1e-4e45-964a-8077114029b2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.644384] env[63345]: DEBUG oslo_vmware.api [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Waiting for the task: (returnval){ [ 786.644384] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]5255e7e4-c6a9-c3a4-2810-b2e535bf7165" [ 786.644384] env[63345]: _type = "Task" [ 786.644384] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.651984] env[63345]: DEBUG oslo_vmware.api [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5255e7e4-c6a9-c3a4-2810-b2e535bf7165, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.675471] env[63345]: DEBUG nova.network.neutron [-] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 786.802089] env[63345]: DEBUG oslo_concurrency.lockutils [None req-aa91f299-8150-490c-b32f-3ea639ffcfb2 tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.055s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 786.804968] env[63345]: DEBUG oslo_concurrency.lockutils [None req-76a23b8d-6e05-4458-badc-d174e0598912 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.637s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 786.805206] env[63345]: DEBUG oslo_concurrency.lockutils [None req-76a23b8d-6e05-4458-badc-d174e0598912 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 786.806979] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.765s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 786.809086] env[63345]: INFO nova.compute.claims [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: fe3e2b2a-9583-482e-b69b-6c130801d7db] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 786.832675] env[63345]: INFO nova.scheduler.client.report [None req-aa91f299-8150-490c-b32f-3ea639ffcfb2 tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Deleted allocations for instance d3e99100-f13f-4019-9b5a-adaa65dacc5f [ 786.841312] env[63345]: INFO nova.scheduler.client.report [None req-76a23b8d-6e05-4458-badc-d174e0598912 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Deleted allocations for instance 27c6dc17-4ded-4fe7-8fba-265eae64fc32 [ 786.963722] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] 
Releasing lock "refresh_cache-cb712d80-be78-4c19-a891-329011521f30" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 786.964074] env[63345]: DEBUG nova.compute.manager [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] [instance: cb712d80-be78-4c19-a891-329011521f30] Instance network_info: |[{"id": "253ec25c-f8ef-41b2-a789-844509636178", "address": "fa:16:3e:74:ae:d2", "network": {"id": "e9d84e91-5123-45ff-bfe5-daed25b5cc76", "bridge": "br-int", "label": "tempest-ServersTestJSON-1455763601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5c17e4fcceb7439183e78786579ac4f0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "257e5ea7-8b80-4301-9900-a754f1fe2031", "external-id": "nsx-vlan-transportzone-682", "segmentation_id": 682, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap253ec25c-f8", "ovs_interfaceid": "253ec25c-f8ef-41b2-a789-844509636178", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 786.964518] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] [instance: cb712d80-be78-4c19-a891-329011521f30] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:74:ae:d2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '257e5ea7-8b80-4301-9900-a754f1fe2031', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '253ec25c-f8ef-41b2-a789-844509636178', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 786.972082] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Creating folder: Project (5c17e4fcceb7439183e78786579ac4f0). Parent ref: group-v225918. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 786.972687] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-922516ae-e7b8-4655-a5a7-322014cb8cd8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.982867] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Created folder: Project (5c17e4fcceb7439183e78786579ac4f0) in parent group-v225918. [ 786.983120] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Creating folder: Instances. Parent ref: group-v226038. 
{{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 786.983336] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-64361792-dc01-4298-93af-9caaf3035568 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.992383] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Created folder: Instances in parent group-v226038. [ 786.992618] env[63345]: DEBUG oslo.service.loopingcall [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 786.992808] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cb712d80-be78-4c19-a891-329011521f30] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 786.993073] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-adb871ab-0c47-4d2d-b155-29f4fff1a7b3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.011785] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 787.011785] env[63345]: value = "task-1017074" [ 787.011785] env[63345]: _type = "Task" [ 787.011785] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.019862] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017074, 'name': CreateVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.154681] env[63345]: DEBUG oslo_vmware.api [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5255e7e4-c6a9-c3a4-2810-b2e535bf7165, 'name': SearchDatastore_Task, 'duration_secs': 0.012561} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.155032] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 787.155287] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore1] 00c58889-75f7-4a4b-a5a3-a45723c1f495/00c58889-75f7-4a4b-a5a3-a45723c1f495.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 787.155556] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a7b2dd19-c3ec-427e-8f58-5643df2f243b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.165966] env[63345]: DEBUG oslo_vmware.api [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Waiting for the task: (returnval){ [ 787.165966] env[63345]: value = "task-1017075" [ 787.165966] env[63345]: _type = "Task" [ 787.165966] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.173477] env[63345]: DEBUG oslo_vmware.api [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Task: {'id': task-1017075, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.178071] env[63345]: INFO nova.compute.manager [-] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Took 1.76 seconds to deallocate network for instance. 
[ 787.213937] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ff43f3f2-d1c3-4221-8082-464b2621fcad tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Acquiring lock "3e4e58bd-903b-4b3d-8be4-5678aab6c721" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 787.213937] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ff43f3f2-d1c3-4221-8082-464b2621fcad tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Lock "3e4e58bd-903b-4b3d-8be4-5678aab6c721" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 787.213937] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ff43f3f2-d1c3-4221-8082-464b2621fcad tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Acquiring lock "3e4e58bd-903b-4b3d-8be4-5678aab6c721-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 787.213937] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ff43f3f2-d1c3-4221-8082-464b2621fcad tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Lock "3e4e58bd-903b-4b3d-8be4-5678aab6c721-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 787.213937] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ff43f3f2-d1c3-4221-8082-464b2621fcad tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Lock "3e4e58bd-903b-4b3d-8be4-5678aab6c721-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 787.217969] env[63345]: INFO nova.compute.manager [None req-ff43f3f2-d1c3-4221-8082-464b2621fcad tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 3e4e58bd-903b-4b3d-8be4-5678aab6c721] Terminating instance [ 787.343277] env[63345]: DEBUG oslo_concurrency.lockutils [None req-aa91f299-8150-490c-b32f-3ea639ffcfb2 tempest-ServerGroupTestJSON-1472670708 tempest-ServerGroupTestJSON-1472670708-project-member] Lock "d3e99100-f13f-4019-9b5a-adaa65dacc5f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.014s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 787.349889] env[63345]: DEBUG oslo_concurrency.lockutils [None req-76a23b8d-6e05-4458-badc-d174e0598912 tempest-MigrationsAdminTest-1586795887 tempest-MigrationsAdminTest-1586795887-project-member] Lock "27c6dc17-4ded-4fe7-8fba-265eae64fc32" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.978s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 787.523502] env[63345]: DEBUG 
oslo_vmware.api [-] Task: {'id': task-1017074, 'name': CreateVM_Task, 'duration_secs': 0.445976} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.523502] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cb712d80-be78-4c19-a891-329011521f30] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 787.523964] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 787.524159] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 787.524518] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 787.524887] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-204e5709-2ef4-4b0c-a296-191706eba0b3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.530276] env[63345]: DEBUG oslo_vmware.api [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Waiting for the task: (returnval){ [ 787.530276] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52854ae3-b108-2062-4418-c2c813e99e99" [ 787.530276] env[63345]: _type = "Task" [ 787.530276] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.538902] env[63345]: DEBUG oslo_vmware.api [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52854ae3-b108-2062-4418-c2c813e99e99, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.553111] env[63345]: DEBUG nova.compute.manager [req-f5b9bd1f-cca1-47d1-baca-23b995dfaa32 req-9ce019f8-f306-4379-9995-360caa779f63 service nova] [instance: cb712d80-be78-4c19-a891-329011521f30] Received event network-vif-plugged-253ec25c-f8ef-41b2-a789-844509636178 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 787.553348] env[63345]: DEBUG oslo_concurrency.lockutils [req-f5b9bd1f-cca1-47d1-baca-23b995dfaa32 req-9ce019f8-f306-4379-9995-360caa779f63 service nova] Acquiring lock "cb712d80-be78-4c19-a891-329011521f30-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 787.553560] env[63345]: DEBUG oslo_concurrency.lockutils [req-f5b9bd1f-cca1-47d1-baca-23b995dfaa32 req-9ce019f8-f306-4379-9995-360caa779f63 service nova] Lock "cb712d80-be78-4c19-a891-329011521f30-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 787.553727] env[63345]: DEBUG oslo_concurrency.lockutils [req-f5b9bd1f-cca1-47d1-baca-23b995dfaa32 req-9ce019f8-f306-4379-9995-360caa779f63 service nova] Lock "cb712d80-be78-4c19-a891-329011521f30-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 787.553898] env[63345]: DEBUG nova.compute.manager [req-f5b9bd1f-cca1-47d1-baca-23b995dfaa32 req-9ce019f8-f306-4379-9995-360caa779f63 service nova] [instance: cb712d80-be78-4c19-a891-329011521f30] No waiting events found dispatching network-vif-plugged-253ec25c-f8ef-41b2-a789-844509636178 {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 787.554092] env[63345]: WARNING nova.compute.manager [req-f5b9bd1f-cca1-47d1-baca-23b995dfaa32 req-9ce019f8-f306-4379-9995-360caa779f63 service nova] [instance: cb712d80-be78-4c19-a891-329011521f30] Received unexpected event network-vif-plugged-253ec25c-f8ef-41b2-a789-844509636178 for instance with vm_state building and task_state spawning. [ 787.554263] env[63345]: DEBUG nova.compute.manager [req-f5b9bd1f-cca1-47d1-baca-23b995dfaa32 req-9ce019f8-f306-4379-9995-360caa779f63 service nova] [instance: cb712d80-be78-4c19-a891-329011521f30] Received event network-changed-253ec25c-f8ef-41b2-a789-844509636178 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 787.554422] env[63345]: DEBUG nova.compute.manager [req-f5b9bd1f-cca1-47d1-baca-23b995dfaa32 req-9ce019f8-f306-4379-9995-360caa779f63 service nova] [instance: cb712d80-be78-4c19-a891-329011521f30] Refreshing instance network info cache due to event network-changed-253ec25c-f8ef-41b2-a789-844509636178. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 787.554603] env[63345]: DEBUG oslo_concurrency.lockutils [req-f5b9bd1f-cca1-47d1-baca-23b995dfaa32 req-9ce019f8-f306-4379-9995-360caa779f63 service nova] Acquiring lock "refresh_cache-cb712d80-be78-4c19-a891-329011521f30" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 787.554745] env[63345]: DEBUG oslo_concurrency.lockutils [req-f5b9bd1f-cca1-47d1-baca-23b995dfaa32 req-9ce019f8-f306-4379-9995-360caa779f63 service nova] Acquired lock "refresh_cache-cb712d80-be78-4c19-a891-329011521f30" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 787.554907] env[63345]: DEBUG nova.network.neutron [req-f5b9bd1f-cca1-47d1-baca-23b995dfaa32 req-9ce019f8-f306-4379-9995-360caa779f63 service nova] [instance: cb712d80-be78-4c19-a891-329011521f30] Refreshing network info cache for port 253ec25c-f8ef-41b2-a789-844509636178 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 787.560345] env[63345]: DEBUG nova.network.neutron [None req-43871fda-22ad-44d3-8c3d-7d22c491d121 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] Successfully updated port: 27e88e33-527e-43d8-af2c-7bb4bdac51e2 {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 787.677110] env[63345]: DEBUG oslo_vmware.api [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Task: {'id': task-1017075, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.770812] env[63345]: DEBUG nova.compute.manager [None req-ff43f3f2-d1c3-4221-8082-464b2621fcad tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 3e4e58bd-903b-4b3d-8be4-5678aab6c721] Start destroying the instance on the hypervisor. 
{{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 787.770812] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ff43f3f2-d1c3-4221-8082-464b2621fcad tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 3e4e58bd-903b-4b3d-8be4-5678aab6c721] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 787.770812] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9793d520-fad9-484a-9bbc-7c90291b444d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.772820] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff43f3f2-d1c3-4221-8082-464b2621fcad tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 3e4e58bd-903b-4b3d-8be4-5678aab6c721] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 787.773092] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-78a52d68-f374-4139-96cb-516b2b026225 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.780913] env[63345]: DEBUG oslo_vmware.api [None req-ff43f3f2-d1c3-4221-8082-464b2621fcad tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Waiting for the task: (returnval){ [ 787.780913] env[63345]: value = "task-1017076" [ 787.780913] env[63345]: _type = "Task" [ 787.780913] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.791550] env[63345]: INFO nova.compute.manager [None req-27434f8c-bdad-4693-b94d-700a2c631c2f tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Took 0.61 seconds to detach 1 volumes for instance. [ 787.794360] env[63345]: DEBUG nova.compute.manager [None req-27434f8c-bdad-4693-b94d-700a2c631c2f tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Deleting volume: fcaa69f8-c32a-43e4-8f84-b58e01f1b245 {{(pid=63345) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3281}} [ 787.800864] env[63345]: DEBUG oslo_vmware.api [None req-ff43f3f2-d1c3-4221-8082-464b2621fcad tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Task: {'id': task-1017076, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.042456] env[63345]: DEBUG oslo_vmware.api [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52854ae3-b108-2062-4418-c2c813e99e99, 'name': SearchDatastore_Task, 'duration_secs': 0.054287} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.045434] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 788.045694] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] [instance: cb712d80-be78-4c19-a891-329011521f30] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 788.046392] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 788.046392] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 788.046392] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 788.046830] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-efb260bc-0645-43f1-8a63-16b962a8d56b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.060549] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 788.060816] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 788.062015] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-748dda11-c7b5-495c-8dc7-87be32198ccd {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.066760] env[63345]: DEBUG oslo_concurrency.lockutils [None req-43871fda-22ad-44d3-8c3d-7d22c491d121 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquiring lock "refresh_cache-85fb1ecd-4ca3-401d-a87a-131f0b275506" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 788.066957] env[63345]: DEBUG oslo_concurrency.lockutils [None req-43871fda-22ad-44d3-8c3d-7d22c491d121 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquired lock "refresh_cache-85fb1ecd-4ca3-401d-a87a-131f0b275506" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 788.067134] env[63345]: DEBUG nova.network.neutron [None req-43871fda-22ad-44d3-8c3d-7d22c491d121 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 788.078180] env[63345]: DEBUG oslo_vmware.api [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Waiting for the task: (returnval){ [ 788.078180] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52f7058f-234c-ff9a-8581-776a318142b1" [ 788.078180] env[63345]: _type = "Task" [ 788.078180] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.095887] env[63345]: DEBUG oslo_vmware.api [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52f7058f-234c-ff9a-8581-776a318142b1, 'name': SearchDatastore_Task, 'duration_secs': 0.014434} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.104529] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8f85d106-0ddc-459c-8715-c9483959fbc6 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.111980] env[63345]: DEBUG oslo_vmware.api [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Waiting for the task: (returnval){ [ 788.111980] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52290a7f-203a-dd68-3c9e-dba4f97b3dac" [ 788.111980] env[63345]: _type = "Task" [ 788.111980] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.127754] env[63345]: DEBUG oslo_vmware.api [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52290a7f-203a-dd68-3c9e-dba4f97b3dac, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.176920] env[63345]: DEBUG oslo_vmware.api [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Task: {'id': task-1017075, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.559576} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.177222] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore1] 00c58889-75f7-4a4b-a5a3-a45723c1f495/00c58889-75f7-4a4b-a5a3-a45723c1f495.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 788.179109] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 788.179109] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1b8436ff-1edc-4af6-88b6-c45effe36f11 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.184349] env[63345]: DEBUG oslo_vmware.api [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Waiting for the task: (returnval){ [ 788.184349] env[63345]: value = "task-1017078" [ 788.184349] env[63345]: _type = "Task" [ 788.184349] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.198133] env[63345]: DEBUG oslo_vmware.api [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Task: {'id': task-1017078, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.292545] env[63345]: DEBUG oslo_vmware.api [None req-ff43f3f2-d1c3-4221-8082-464b2621fcad tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Task: {'id': task-1017076, 'name': PowerOffVM_Task, 'duration_secs': 0.392574} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.293582] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff43f3f2-d1c3-4221-8082-464b2621fcad tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 3e4e58bd-903b-4b3d-8be4-5678aab6c721] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 788.293582] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ff43f3f2-d1c3-4221-8082-464b2621fcad tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 3e4e58bd-903b-4b3d-8be4-5678aab6c721] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 788.293582] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-89fa41e4-808c-4d2a-b0cc-05cb87325c79 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.308298] env[63345]: DEBUG nova.network.neutron [req-f5b9bd1f-cca1-47d1-baca-23b995dfaa32 req-9ce019f8-f306-4379-9995-360caa779f63 service nova] [instance: cb712d80-be78-4c19-a891-329011521f30] Updated VIF entry in instance network info cache for port 253ec25c-f8ef-41b2-a789-844509636178. {{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 788.308694] env[63345]: DEBUG nova.network.neutron [req-f5b9bd1f-cca1-47d1-baca-23b995dfaa32 req-9ce019f8-f306-4379-9995-360caa779f63 service nova] [instance: cb712d80-be78-4c19-a891-329011521f30] Updating instance_info_cache with network_info: [{"id": "253ec25c-f8ef-41b2-a789-844509636178", "address": "fa:16:3e:74:ae:d2", "network": {"id": "e9d84e91-5123-45ff-bfe5-daed25b5cc76", "bridge": "br-int", "label": "tempest-ServersTestJSON-1455763601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5c17e4fcceb7439183e78786579ac4f0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "257e5ea7-8b80-4301-9900-a754f1fe2031", "external-id": "nsx-vlan-transportzone-682", "segmentation_id": 682, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap253ec25c-f8", "ovs_interfaceid": "253ec25c-f8ef-41b2-a789-844509636178", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 788.325542] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4aefd8b-35ec-4f00-b55e-621a1f81f86d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.335920] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a642b6fb-e5ea-4ef6-971c-1b89fba935b2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.374165] env[63345]: 
DEBUG oslo_concurrency.lockutils [None req-27434f8c-bdad-4693-b94d-700a2c631c2f tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 788.377619] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a51e912-28de-4f63-b806-8fbfdd73080f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.379965] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ff43f3f2-d1c3-4221-8082-464b2621fcad tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 3e4e58bd-903b-4b3d-8be4-5678aab6c721] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 788.380240] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ff43f3f2-d1c3-4221-8082-464b2621fcad tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 3e4e58bd-903b-4b3d-8be4-5678aab6c721] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 788.380464] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff43f3f2-d1c3-4221-8082-464b2621fcad tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Deleting the datastore file [datastore2] 3e4e58bd-903b-4b3d-8be4-5678aab6c721 {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 788.380792] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-affbcdf0-3205-4f53-a51b-69c5ce9b4911 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.390020] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0174ccf2-a093-4aff-8fbc-e4a774f205e4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.394570] env[63345]: DEBUG oslo_vmware.api [None req-ff43f3f2-d1c3-4221-8082-464b2621fcad tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Waiting for the task: (returnval){ [ 788.394570] env[63345]: value = "task-1017080" [ 788.394570] env[63345]: _type = "Task" [ 788.394570] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.408429] env[63345]: DEBUG nova.compute.provider_tree [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 788.415531] env[63345]: DEBUG oslo_vmware.api [None req-ff43f3f2-d1c3-4221-8082-464b2621fcad tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Task: {'id': task-1017080, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.618894] env[63345]: WARNING nova.network.neutron [None req-43871fda-22ad-44d3-8c3d-7d22c491d121 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] b360ab0d-3deb-4632-a8d5-c1639db9e9e2 already exists in list: networks containing: ['b360ab0d-3deb-4632-a8d5-c1639db9e9e2']. ignoring it [ 788.625307] env[63345]: DEBUG oslo_vmware.api [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52290a7f-203a-dd68-3c9e-dba4f97b3dac, 'name': SearchDatastore_Task, 'duration_secs': 0.019835} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.625307] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 788.625307] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore1] cb712d80-be78-4c19-a891-329011521f30/cb712d80-be78-4c19-a891-329011521f30.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 788.625307] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-eb288045-9753-4e7f-b2ef-171f52fea08c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.632206] env[63345]: DEBUG oslo_vmware.api [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Waiting for the task: (returnval){ [ 788.632206] env[63345]: value = "task-1017081" [ 788.632206] env[63345]: _type = "Task" [ 788.632206] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.639934] env[63345]: DEBUG oslo_vmware.api [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Task: {'id': task-1017081, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.696140] env[63345]: DEBUG oslo_vmware.api [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Task: {'id': task-1017078, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.1112} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.696564] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 788.700073] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1aab1c3a-f1bf-455d-ad58-3cf439f95c6f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.724806] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Reconfiguring VM instance instance-0000003d to attach disk [datastore1] 00c58889-75f7-4a4b-a5a3-a45723c1f495/00c58889-75f7-4a4b-a5a3-a45723c1f495.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 788.725199] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ddb313aa-3545-49d1-b2a5-886afee1bbe6 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.750170] env[63345]: DEBUG oslo_vmware.api [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Waiting for the task: (returnval){ [ 788.750170] env[63345]: value = "task-1017082" [ 788.750170] env[63345]: _type = "Task" [ 788.750170] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.758813] env[63345]: DEBUG oslo_vmware.api [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Task: {'id': task-1017082, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.814484] env[63345]: DEBUG oslo_concurrency.lockutils [req-f5b9bd1f-cca1-47d1-baca-23b995dfaa32 req-9ce019f8-f306-4379-9995-360caa779f63 service nova] Releasing lock "refresh_cache-cb712d80-be78-4c19-a891-329011521f30" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 788.814810] env[63345]: DEBUG nova.compute.manager [req-f5b9bd1f-cca1-47d1-baca-23b995dfaa32 req-9ce019f8-f306-4379-9995-360caa779f63 service nova] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Received event network-vif-deleted-9bf872ef-9bac-41a4-b3eb-319d2572fee6 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 788.909143] env[63345]: DEBUG oslo_vmware.api [None req-ff43f3f2-d1c3-4221-8082-464b2621fcad tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Task: {'id': task-1017080, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.151344} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.909247] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff43f3f2-d1c3-4221-8082-464b2621fcad tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 788.909471] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ff43f3f2-d1c3-4221-8082-464b2621fcad tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 3e4e58bd-903b-4b3d-8be4-5678aab6c721] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 788.909687] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ff43f3f2-d1c3-4221-8082-464b2621fcad tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 3e4e58bd-903b-4b3d-8be4-5678aab6c721] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 788.909974] env[63345]: INFO nova.compute.manager [None req-ff43f3f2-d1c3-4221-8082-464b2621fcad tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] [instance: 3e4e58bd-903b-4b3d-8be4-5678aab6c721] Took 1.18 seconds to destroy the instance on the hypervisor. [ 788.910308] env[63345]: DEBUG oslo.service.loopingcall [None req-ff43f3f2-d1c3-4221-8082-464b2621fcad tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 788.910541] env[63345]: DEBUG nova.compute.manager [-] [instance: 3e4e58bd-903b-4b3d-8be4-5678aab6c721] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 788.910650] env[63345]: DEBUG nova.network.neutron [-] [instance: 3e4e58bd-903b-4b3d-8be4-5678aab6c721] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 788.913567] env[63345]: DEBUG nova.scheduler.client.report [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 789.063913] env[63345]: DEBUG nova.network.neutron [None req-43871fda-22ad-44d3-8c3d-7d22c491d121 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] Updating instance_info_cache with network_info: [{"id": "989bf403-079f-46b9-ab79-c645cec393aa", "address": "fa:16:3e:93:20:cd", "network": {"id": "b360ab0d-3deb-4632-a8d5-c1639db9e9e2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2015660260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.217", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33c28bfca4da460e8ca96dc7519204c8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f35e69ef-c2c8-4b8c-9887-33e97b242c0a", "external-id": "nsx-vlan-transportzone-969", "segmentation_id": 969, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap989bf403-07", "ovs_interfaceid": "989bf403-079f-46b9-ab79-c645cec393aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "27e88e33-527e-43d8-af2c-7bb4bdac51e2", "address": "fa:16:3e:75:69:b3", "network": {"id": "b360ab0d-3deb-4632-a8d5-c1639db9e9e2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2015660260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33c28bfca4da460e8ca96dc7519204c8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, 
"type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f35e69ef-c2c8-4b8c-9887-33e97b242c0a", "external-id": "nsx-vlan-transportzone-969", "segmentation_id": 969, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap27e88e33-52", "ovs_interfaceid": "27e88e33-527e-43d8-af2c-7bb4bdac51e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 789.152482] env[63345]: DEBUG oslo_vmware.api [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Task: {'id': task-1017081, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.263336] env[63345]: DEBUG oslo_vmware.api [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Task: {'id': task-1017082, 'name': ReconfigVM_Task, 'duration_secs': 0.50275} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.263672] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Reconfigured VM instance instance-0000003d to attach disk [datastore1] 00c58889-75f7-4a4b-a5a3-a45723c1f495/00c58889-75f7-4a4b-a5a3-a45723c1f495.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 789.264397] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-da0baa8a-768b-402f-853e-71559a326dff {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.271228] env[63345]: DEBUG oslo_vmware.api [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Waiting for the task: (returnval){ [ 789.271228] env[63345]: value = "task-1017083" [ 789.271228] env[63345]: _type = "Task" [ 789.271228] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.281333] env[63345]: DEBUG oslo_vmware.api [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Task: {'id': task-1017083, 'name': Rename_Task} progress is 5%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.422124] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.615s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 789.422916] env[63345]: DEBUG nova.compute.manager [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: fe3e2b2a-9583-482e-b69b-6c130801d7db] Start building networks asynchronously for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 789.426915] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a81329fb-cc18-4913-bbf9-8c001583ad88 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.065s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 789.427359] env[63345]: DEBUG nova.objects.instance [None req-a81329fb-cc18-4913-bbf9-8c001583ad88 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Lazy-loading 'resources' on Instance uuid 46d3332a-bfb9-4812-8201-a87467ce5151 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 789.571026] env[63345]: DEBUG oslo_concurrency.lockutils [None req-43871fda-22ad-44d3-8c3d-7d22c491d121 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Releasing lock "refresh_cache-85fb1ecd-4ca3-401d-a87a-131f0b275506" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 789.571026] env[63345]: DEBUG oslo_concurrency.lockutils [None req-43871fda-22ad-44d3-8c3d-7d22c491d121 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquiring lock "85fb1ecd-4ca3-401d-a87a-131f0b275506" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 789.571026] env[63345]: DEBUG oslo_concurrency.lockutils [None req-43871fda-22ad-44d3-8c3d-7d22c491d121 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquired lock "85fb1ecd-4ca3-401d-a87a-131f0b275506" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 789.571026] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bc349db-2abd-4b8b-8072-40f797033b64 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.588923] env[63345]: DEBUG nova.virt.hardware [None req-43871fda-22ad-44d3-8c3d-7d22c491d121 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 789.589520] env[63345]: DEBUG nova.virt.hardware [None req-43871fda-22ad-44d3-8c3d-7d22c491d121 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 789.589832] env[63345]: DEBUG nova.virt.hardware [None req-43871fda-22ad-44d3-8c3d-7d22c491d121 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 789.590184] env[63345]: DEBUG nova.virt.hardware [None req-43871fda-22ad-44d3-8c3d-7d22c491d121 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 789.590483] env[63345]: DEBUG nova.virt.hardware [None req-43871fda-22ad-44d3-8c3d-7d22c491d121 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 789.590789] env[63345]: DEBUG nova.virt.hardware [None req-43871fda-22ad-44d3-8c3d-7d22c491d121 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 789.591147] env[63345]: DEBUG nova.virt.hardware [None req-43871fda-22ad-44d3-8c3d-7d22c491d121 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 789.591525] env[63345]: DEBUG nova.virt.hardware [None req-43871fda-22ad-44d3-8c3d-7d22c491d121 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 789.591858] env[63345]: DEBUG nova.virt.hardware [None req-43871fda-22ad-44d3-8c3d-7d22c491d121 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 789.592184] env[63345]: DEBUG nova.virt.hardware [None req-43871fda-22ad-44d3-8c3d-7d22c491d121 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Possible topologies 
[VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 789.596548] env[63345]: DEBUG nova.virt.hardware [None req-43871fda-22ad-44d3-8c3d-7d22c491d121 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 789.600642] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-43871fda-22ad-44d3-8c3d-7d22c491d121 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] Reconfiguring VM to attach interface {{(pid=63345) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1929}} [ 789.601139] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f61f6e59-7fe7-44b5-93a8-051a0801837f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.621139] env[63345]: DEBUG oslo_vmware.api [None req-43871fda-22ad-44d3-8c3d-7d22c491d121 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Waiting for the task: (returnval){ [ 789.621139] env[63345]: value = "task-1017084" [ 789.621139] env[63345]: _type = "Task" [ 789.621139] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.629379] env[63345]: DEBUG oslo_vmware.api [None req-43871fda-22ad-44d3-8c3d-7d22c491d121 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017084, 'name': ReconfigVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.642494] env[63345]: DEBUG oslo_vmware.api [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Task: {'id': task-1017081, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.557384} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.642900] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore1] cb712d80-be78-4c19-a891-329011521f30/cb712d80-be78-4c19-a891-329011521f30.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 789.643261] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] [instance: cb712d80-be78-4c19-a891-329011521f30] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 789.643625] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-abef5a77-ee60-4abd-b135-8355cc2fc1cf {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.654041] env[63345]: DEBUG oslo_vmware.api [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Waiting for the task: (returnval){ [ 789.654041] env[63345]: value = "task-1017085" [ 789.654041] env[63345]: _type = "Task" [ 789.654041] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.662014] env[63345]: DEBUG oslo_vmware.api [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Task: {'id': task-1017085, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.782184] env[63345]: DEBUG oslo_vmware.api [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Task: {'id': task-1017083, 'name': Rename_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.858029] env[63345]: DEBUG nova.network.neutron [-] [instance: 3e4e58bd-903b-4b3d-8be4-5678aab6c721] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 789.930647] env[63345]: DEBUG nova.compute.utils [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 789.932389] env[63345]: DEBUG nova.compute.manager [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: fe3e2b2a-9583-482e-b69b-6c130801d7db] Allocating IP information in the background. 
{{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 789.932487] env[63345]: DEBUG nova.network.neutron [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: fe3e2b2a-9583-482e-b69b-6c130801d7db] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 789.988049] env[63345]: DEBUG nova.policy [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd58b1b41d48e44788f4667bf686352de', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8f0343855b6147f38b0cb3f2c72330e0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 790.138268] env[63345]: DEBUG oslo_vmware.api [None req-43871fda-22ad-44d3-8c3d-7d22c491d121 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017084, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.164090] env[63345]: DEBUG oslo_vmware.api [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Task: {'id': task-1017085, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.137262} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.164381] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] [instance: cb712d80-be78-4c19-a891-329011521f30] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 790.165182] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88a11337-6484-4a21-acfb-cf443ca195a7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.191065] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] [instance: cb712d80-be78-4c19-a891-329011521f30] Reconfiguring VM instance instance-0000003e to attach disk [datastore1] cb712d80-be78-4c19-a891-329011521f30/cb712d80-be78-4c19-a891-329011521f30.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 790.193919] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-74b9f5aa-14c8-4ed2-a17c-09465ac7a293 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.215674] env[63345]: DEBUG nova.compute.manager [req-28cdd338-f8a4-4166-bd5c-7dc9ac12afda req-cf553cc2-381e-4031-9f80-64f5544a6cc7 service nova] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] Received event network-vif-plugged-27e88e33-527e-43d8-af2c-7bb4bdac51e2 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 790.215972] env[63345]: DEBUG oslo_concurrency.lockutils [req-28cdd338-f8a4-4166-bd5c-7dc9ac12afda req-cf553cc2-381e-4031-9f80-64f5544a6cc7 service nova] Acquiring lock "85fb1ecd-4ca3-401d-a87a-131f0b275506-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 790.216276] env[63345]: DEBUG oslo_concurrency.lockutils [req-28cdd338-f8a4-4166-bd5c-7dc9ac12afda req-cf553cc2-381e-4031-9f80-64f5544a6cc7 service nova] Lock "85fb1ecd-4ca3-401d-a87a-131f0b275506-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 790.216522] env[63345]: DEBUG oslo_concurrency.lockutils [req-28cdd338-f8a4-4166-bd5c-7dc9ac12afda req-cf553cc2-381e-4031-9f80-64f5544a6cc7 service nova] Lock "85fb1ecd-4ca3-401d-a87a-131f0b275506-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 790.216750] env[63345]: DEBUG nova.compute.manager [req-28cdd338-f8a4-4166-bd5c-7dc9ac12afda req-cf553cc2-381e-4031-9f80-64f5544a6cc7 service nova] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] No waiting events found dispatching network-vif-plugged-27e88e33-527e-43d8-af2c-7bb4bdac51e2 {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 790.217081] env[63345]: WARNING nova.compute.manager [req-28cdd338-f8a4-4166-bd5c-7dc9ac12afda 
req-cf553cc2-381e-4031-9f80-64f5544a6cc7 service nova] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] Received unexpected event network-vif-plugged-27e88e33-527e-43d8-af2c-7bb4bdac51e2 for instance with vm_state active and task_state None. [ 790.217308] env[63345]: DEBUG nova.compute.manager [req-28cdd338-f8a4-4166-bd5c-7dc9ac12afda req-cf553cc2-381e-4031-9f80-64f5544a6cc7 service nova] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] Received event network-changed-27e88e33-527e-43d8-af2c-7bb4bdac51e2 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 790.217518] env[63345]: DEBUG nova.compute.manager [req-28cdd338-f8a4-4166-bd5c-7dc9ac12afda req-cf553cc2-381e-4031-9f80-64f5544a6cc7 service nova] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] Refreshing instance network info cache due to event network-changed-27e88e33-527e-43d8-af2c-7bb4bdac51e2. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 790.217760] env[63345]: DEBUG oslo_concurrency.lockutils [req-28cdd338-f8a4-4166-bd5c-7dc9ac12afda req-cf553cc2-381e-4031-9f80-64f5544a6cc7 service nova] Acquiring lock "refresh_cache-85fb1ecd-4ca3-401d-a87a-131f0b275506" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 790.218194] env[63345]: DEBUG oslo_concurrency.lockutils [req-28cdd338-f8a4-4166-bd5c-7dc9ac12afda req-cf553cc2-381e-4031-9f80-64f5544a6cc7 service nova] Acquired lock "refresh_cache-85fb1ecd-4ca3-401d-a87a-131f0b275506" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 790.218194] env[63345]: DEBUG nova.network.neutron [req-28cdd338-f8a4-4166-bd5c-7dc9ac12afda req-cf553cc2-381e-4031-9f80-64f5544a6cc7 service nova] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] Refreshing network info cache for port 27e88e33-527e-43d8-af2c-7bb4bdac51e2 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 790.224245] env[63345]: DEBUG oslo_vmware.api [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Waiting for the task: (returnval){ [ 790.224245] env[63345]: value = "task-1017086" [ 790.224245] env[63345]: _type = "Task" [ 790.224245] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.235658] env[63345]: DEBUG oslo_vmware.api [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Task: {'id': task-1017086, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.268845] env[63345]: DEBUG nova.network.neutron [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: fe3e2b2a-9583-482e-b69b-6c130801d7db] Successfully created port: 5c61daf5-e16c-4171-8fbf-a8d0108d4a21 {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 790.283052] env[63345]: DEBUG oslo_vmware.api [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Task: {'id': task-1017083, 'name': Rename_Task} progress is 14%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.360606] env[63345]: INFO nova.compute.manager [-] [instance: 3e4e58bd-903b-4b3d-8be4-5678aab6c721] Took 1.45 seconds to deallocate network for instance. [ 790.391863] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c210338a-4cae-4a29-ada4-8d6e4d8844c1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.400751] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eda055df-ad88-4f39-aa8c-dc66349e11e8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.432576] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-440366ec-57e0-431f-863a-f8f0e3c38b17 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.440181] env[63345]: DEBUG nova.compute.manager [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: fe3e2b2a-9583-482e-b69b-6c130801d7db] Start building block device mappings for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 790.444834] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b64ec2e4-cbc1-41f3-a5b5-0166174fbb56 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.460906] env[63345]: DEBUG nova.compute.provider_tree [None req-a81329fb-cc18-4913-bbf9-8c001583ad88 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 790.636903] env[63345]: DEBUG oslo_vmware.api [None req-43871fda-22ad-44d3-8c3d-7d22c491d121 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017084, 'name': ReconfigVM_Task, 'duration_secs': 0.74128} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.637467] env[63345]: DEBUG oslo_concurrency.lockutils [None req-43871fda-22ad-44d3-8c3d-7d22c491d121 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Releasing lock "85fb1ecd-4ca3-401d-a87a-131f0b275506" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 790.637693] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-43871fda-22ad-44d3-8c3d-7d22c491d121 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] Reconfigured VM to attach interface {{(pid=63345) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1943}} [ 790.735699] env[63345]: DEBUG oslo_vmware.api [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Task: {'id': task-1017086, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.783532] env[63345]: DEBUG oslo_vmware.api [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Task: {'id': task-1017083, 'name': Rename_Task, 'duration_secs': 1.240574} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.783644] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 790.783911] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-21c9f6e4-e513-4150-b55a-930c731cf2ab {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.791094] env[63345]: DEBUG oslo_vmware.api [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Waiting for the task: (returnval){ [ 790.791094] env[63345]: value = "task-1017087" [ 790.791094] env[63345]: _type = "Task" [ 790.791094] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.798988] env[63345]: DEBUG oslo_vmware.api [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Task: {'id': task-1017087, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.868665] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ff43f3f2-d1c3-4221-8082-464b2621fcad tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 790.925059] env[63345]: DEBUG nova.network.neutron [req-28cdd338-f8a4-4166-bd5c-7dc9ac12afda req-cf553cc2-381e-4031-9f80-64f5544a6cc7 service nova] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] Updated VIF entry in instance network info cache for port 27e88e33-527e-43d8-af2c-7bb4bdac51e2. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 790.925545] env[63345]: DEBUG nova.network.neutron [req-28cdd338-f8a4-4166-bd5c-7dc9ac12afda req-cf553cc2-381e-4031-9f80-64f5544a6cc7 service nova] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] Updating instance_info_cache with network_info: [{"id": "989bf403-079f-46b9-ab79-c645cec393aa", "address": "fa:16:3e:93:20:cd", "network": {"id": "b360ab0d-3deb-4632-a8d5-c1639db9e9e2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2015660260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.217", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33c28bfca4da460e8ca96dc7519204c8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f35e69ef-c2c8-4b8c-9887-33e97b242c0a", "external-id": "nsx-vlan-transportzone-969", "segmentation_id": 969, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap989bf403-07", "ovs_interfaceid": "989bf403-079f-46b9-ab79-c645cec393aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "27e88e33-527e-43d8-af2c-7bb4bdac51e2", "address": "fa:16:3e:75:69:b3", "network": {"id": "b360ab0d-3deb-4632-a8d5-c1639db9e9e2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2015660260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33c28bfca4da460e8ca96dc7519204c8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f35e69ef-c2c8-4b8c-9887-33e97b242c0a", "external-id": "nsx-vlan-transportzone-969", "segmentation_id": 969, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap27e88e33-52", "ovs_interfaceid": "27e88e33-527e-43d8-af2c-7bb4bdac51e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 790.966031] env[63345]: DEBUG nova.scheduler.client.report [None req-a81329fb-cc18-4913-bbf9-8c001583ad88 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 791.142830] env[63345]: DEBUG oslo_concurrency.lockutils [None req-43871fda-22ad-44d3-8c3d-7d22c491d121 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Lock "interface-85fb1ecd-4ca3-401d-a87a-131f0b275506-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.933s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 791.235907] env[63345]: DEBUG oslo_vmware.api [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Task: {'id': task-1017086, 'name': ReconfigVM_Task, 'duration_secs': 0.754897} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.236222] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] [instance: cb712d80-be78-4c19-a891-329011521f30] Reconfigured VM instance instance-0000003e to attach disk [datastore1] cb712d80-be78-4c19-a891-329011521f30/cb712d80-be78-4c19-a891-329011521f30.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 791.236862] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1e2a762b-feb5-4845-b570-783040dcf0ad {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.244159] env[63345]: DEBUG oslo_vmware.api [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Waiting for the task: (returnval){ [ 791.244159] env[63345]: value = "task-1017088" [ 791.244159] env[63345]: _type = "Task" [ 791.244159] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.257627] env[63345]: DEBUG oslo_vmware.api [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Task: {'id': task-1017088, 'name': Rename_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.301326] env[63345]: DEBUG oslo_vmware.api [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Task: {'id': task-1017087, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.429379] env[63345]: DEBUG oslo_concurrency.lockutils [req-28cdd338-f8a4-4166-bd5c-7dc9ac12afda req-cf553cc2-381e-4031-9f80-64f5544a6cc7 service nova] Releasing lock "refresh_cache-85fb1ecd-4ca3-401d-a87a-131f0b275506" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 791.429718] env[63345]: DEBUG nova.compute.manager [req-28cdd338-f8a4-4166-bd5c-7dc9ac12afda req-cf553cc2-381e-4031-9f80-64f5544a6cc7 service nova] [instance: 3e4e58bd-903b-4b3d-8be4-5678aab6c721] Received event network-vif-deleted-1e56115d-0d69-4b50-8607-b08677046c73 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 791.449379] env[63345]: DEBUG nova.compute.manager [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: fe3e2b2a-9583-482e-b69b-6c130801d7db] Start spawning the instance on the hypervisor. {{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 791.471018] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a81329fb-cc18-4913-bbf9-8c001583ad88 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.044s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 791.475413] env[63345]: DEBUG nova.virt.hardware [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 791.475701] env[63345]: DEBUG nova.virt.hardware [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 791.475798] env[63345]: DEBUG nova.virt.hardware [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 791.475991] env[63345]: DEBUG nova.virt.hardware [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:388}} [ 791.476165] env[63345]: DEBUG nova.virt.hardware [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 791.476321] env[63345]: DEBUG nova.virt.hardware [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 791.476661] env[63345]: DEBUG nova.virt.hardware [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 791.476818] env[63345]: DEBUG nova.virt.hardware [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 791.476848] env[63345]: DEBUG nova.virt.hardware [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 791.477302] env[63345]: DEBUG nova.virt.hardware [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 791.477302] env[63345]: DEBUG nova.virt.hardware [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 791.477619] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 26.114s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 791.479987] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69189553-7bbb-46a8-af7d-a857871a2bc2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.489056] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1c7651d-d645-4232-a4ca-cd1aac383e2e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.505257] env[63345]: INFO nova.scheduler.client.report [None req-a81329fb-cc18-4913-bbf9-8c001583ad88 tempest-ServersTestManualDisk-2063123197 
tempest-ServersTestManualDisk-2063123197-project-member] Deleted allocations for instance 46d3332a-bfb9-4812-8201-a87467ce5151 [ 791.759792] env[63345]: DEBUG oslo_vmware.api [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Task: {'id': task-1017088, 'name': Rename_Task, 'duration_secs': 0.197307} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.760449] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] [instance: cb712d80-be78-4c19-a891-329011521f30] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 791.760751] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-17f1550c-1f2e-4757-8e0f-8a3f645da39d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.769311] env[63345]: DEBUG oslo_vmware.api [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Waiting for the task: (returnval){ [ 791.769311] env[63345]: value = "task-1017089" [ 791.769311] env[63345]: _type = "Task" [ 791.769311] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.781307] env[63345]: DEBUG oslo_vmware.api [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Task: {'id': task-1017089, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.801795] env[63345]: DEBUG oslo_vmware.api [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Task: {'id': task-1017087, 'name': PowerOnVM_Task, 'duration_secs': 0.854354} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.802167] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 791.802337] env[63345]: INFO nova.compute.manager [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Took 9.83 seconds to spawn the instance on the hypervisor. 
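The records above repeatedly show the vCenter task life cycle: "Waiting for the task: (returnval){ value = task-NNNNNNN }", periodic "progress is N%" polls, and a final "completed successfully" entry with a duration. The following is a minimal, self-contained sketch of that polling shape only; FakeTask, poll() and wait_for_task() are hypothetical names for illustration and are not the oslo.vmware API used in this log.

```python
# Illustrative sketch only: a plain-Python stand-in for the
# "Waiting for the task ... / progress is N% / completed successfully"
# pattern in the records above. Not oslo.vmware; names are hypothetical.
import time


class FakeTask:
    """A toy task that advances its progress each time it is polled."""

    def __init__(self, task_id, name, steps=(0, 14, 66, 88, 100)):
        self.task_id = task_id
        self.name = name
        self._steps = list(steps)

    def poll(self):
        # Return the next progress value; stay at 100 once finished.
        return self._steps.pop(0) if self._steps else 100


def wait_for_task(task, interval=0.1):
    """Poll until the task reports 100% and return the wait duration."""
    started = time.monotonic()
    while True:
        progress = task.poll()
        print("Task: {'id': %r, 'name': %r} progress is %d%%."
              % (task.task_id, task.name, progress))
        if progress >= 100:
            return time.monotonic() - started
        time.sleep(interval)


if __name__ == "__main__":
    duration = wait_for_task(FakeTask("task-1017089", "PowerOnVM_Task"))
    print("completed successfully in %.3fs" % duration)
```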
[ 791.802522] env[63345]: DEBUG nova.compute.manager [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 791.804395] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f8d8175-a1aa-4980-8651-ff7fb38a22bf {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.911546] env[63345]: DEBUG nova.network.neutron [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: fe3e2b2a-9583-482e-b69b-6c130801d7db] Successfully updated port: 5c61daf5-e16c-4171-8fbf-a8d0108d4a21 {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 792.014862] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a81329fb-cc18-4913-bbf9-8c001583ad88 tempest-ServersTestManualDisk-2063123197 tempest-ServersTestManualDisk-2063123197-project-member] Lock "46d3332a-bfb9-4812-8201-a87467ce5151" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.381s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 792.282469] env[63345]: DEBUG oslo_vmware.api [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Task: {'id': task-1017089, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.336960] env[63345]: INFO nova.compute.manager [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Took 43.04 seconds to build instance. [ 792.415410] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Acquiring lock "refresh_cache-fe3e2b2a-9583-482e-b69b-6c130801d7db" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 792.415410] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Acquired lock "refresh_cache-fe3e2b2a-9583-482e-b69b-6c130801d7db" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 792.415410] env[63345]: DEBUG nova.network.neutron [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: fe3e2b2a-9583-482e-b69b-6c130801d7db] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 792.521665] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance bc9d2e6a-f77a-4a21-90bc-81949cbfce91 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 792.521871] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 070a834d-6478-4705-8df0-2a27c8780507 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 792.522040] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 805f9143-a8d8-4995-a20d-3b10ef3ab599 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 792.522220] env[63345]: WARNING nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 3e4e58bd-903b-4b3d-8be4-5678aab6c721 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 792.522812] env[63345]: WARNING nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 7bef089c-e93b-4ba6-a683-4e076489f92a is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 792.522812] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance f043239f-7158-4199-a784-d711a5a301be actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 792.522812] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 869f8110-6490-4a47-955a-0ce085f826af actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 792.522812] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 85fb1ecd-4ca3-401d-a87a-131f0b275506 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 792.522812] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 4a59b565-571f-48ef-97bd-bed9853e2d8e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 792.523041] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 6cbe136b-5bf6-4f17-bcef-b712d850615f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 792.524144] env[63345]: WARNING nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 778faa4f-4c5f-4ec2-b17b-5d7513c9c218 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 792.524144] env[63345]: WARNING nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 93112cc1-f9a1-4188-9555-bddf483426a1 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 792.524144] env[63345]: WARNING nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance e3d52cbd-e768-4425-b83e-180a6e58fd00 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 792.524144] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 3a85df04-3997-48a3-8992-f24fe997b3cc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 792.524144] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance b5173471-3367-42ba-b450-62ad8573f048 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 792.524144] env[63345]: WARNING nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance bcec23fe-75c7-479e-9210-85ca6781d7e5 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 792.524653] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance a9b69d13-6330-4f9b-b8e1-1c0017655f9f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 792.524864] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 02eb493e-d1a1-4461-8e3f-e493e96fe058 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 792.525050] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 00c58889-75f7-4a4b-a5a3-a45723c1f495 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 792.525216] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance cb712d80-be78-4c19-a891-329011521f30 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 792.525405] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance fe3e2b2a-9583-482e-b69b-6c130801d7db actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 792.526885] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1125b816-eb22-46d7-bce9-e2884c5691f2 tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Acquiring lock "a9b69d13-6330-4f9b-b8e1-1c0017655f9f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 792.527256] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1125b816-eb22-46d7-bce9-e2884c5691f2 tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Lock "a9b69d13-6330-4f9b-b8e1-1c0017655f9f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 792.527508] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1125b816-eb22-46d7-bce9-e2884c5691f2 tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Acquiring lock "a9b69d13-6330-4f9b-b8e1-1c0017655f9f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 792.527710] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1125b816-eb22-46d7-bce9-e2884c5691f2 tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Lock "a9b69d13-6330-4f9b-b8e1-1c0017655f9f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 792.527883] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1125b816-eb22-46d7-bce9-e2884c5691f2 tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Lock "a9b69d13-6330-4f9b-b8e1-1c0017655f9f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 792.529938] env[63345]: INFO nova.compute.manager [None req-1125b816-eb22-46d7-bce9-e2884c5691f2 tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] [instance: a9b69d13-6330-4f9b-b8e1-1c0017655f9f] Terminating instance [ 792.672115] env[63345]: INFO nova.compute.manager [None req-47dfeaf8-0b50-4ebd-bbca-68aee8d31603 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Rescuing [ 792.672329] env[63345]: DEBUG oslo_concurrency.lockutils [None req-47dfeaf8-0b50-4ebd-bbca-68aee8d31603 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Acquiring lock "refresh_cache-00c58889-75f7-4a4b-a5a3-a45723c1f495" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 792.672891] env[63345]: DEBUG oslo_concurrency.lockutils [None req-47dfeaf8-0b50-4ebd-bbca-68aee8d31603 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Acquired lock "refresh_cache-00c58889-75f7-4a4b-a5a3-a45723c1f495" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 792.673265] env[63345]: DEBUG nova.network.neutron [None req-47dfeaf8-0b50-4ebd-bbca-68aee8d31603 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 792.780653] env[63345]: DEBUG oslo_vmware.api [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Task: {'id': task-1017089, 'name': PowerOnVM_Task, 'duration_secs': 0.758474} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.780977] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] [instance: cb712d80-be78-4c19-a891-329011521f30] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 792.781987] env[63345]: INFO nova.compute.manager [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] [instance: cb712d80-be78-4c19-a891-329011521f30] Took 8.12 seconds to spawn the instance on the hypervisor. 
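The lock records just above follow a fixed three-step shape: "Acquiring lock ... by ...", "acquired ... :: waited Xs", and "released ... :: held Ys". A minimal sketch of that bookkeeping with a plain threading.Lock is shown below; timed_lock is a hypothetical helper written for illustration and is not the oslo.concurrency lockutils implementation that produced these lines.

```python
# Illustrative sketch only: reproduce the "waited"/"held" timing bookkeeping
# seen in the lock records above with a plain threading.Lock.
import contextlib
import threading
import time

_LOCKS = {}  # name -> threading.Lock (single-process demo registry)


@contextlib.contextmanager
def timed_lock(name, caller):
    lock = _LOCKS.setdefault(name, threading.Lock())
    print('Acquiring lock "%s" by "%s"' % (name, caller))
    t0 = time.monotonic()
    lock.acquire()
    acquired_at = time.monotonic()
    print('Lock "%s" acquired by "%s" :: waited %.3fs'
          % (name, caller, acquired_at - t0))
    try:
        yield
    finally:
        lock.release()
        print('Lock "%s" "released" by "%s" :: held %.3fs'
              % (name, caller, time.monotonic() - acquired_at))


if __name__ == "__main__":
    with timed_lock("compute_resources", "ResourceTracker.update_usage"):
        time.sleep(0.05)  # simulate work done while holding the lock
```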
[ 792.782239] env[63345]: DEBUG nova.compute.manager [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] [instance: cb712d80-be78-4c19-a891-329011521f30] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 792.783054] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66a73221-e909-49ed-b12d-3d08e69347e8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.838888] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fbc92d26-5568-46a6-ae54-fbf47da23888 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Lock "00c58889-75f7-4a4b-a5a3-a45723c1f495" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 67.952s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 792.917019] env[63345]: DEBUG nova.compute.manager [req-7c0152fa-b02a-425b-8d66-91db38255f37 req-edd8cc29-eea5-4738-a011-50718f5ca06d service nova] [instance: fe3e2b2a-9583-482e-b69b-6c130801d7db] Received event network-vif-plugged-5c61daf5-e16c-4171-8fbf-a8d0108d4a21 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 792.917280] env[63345]: DEBUG oslo_concurrency.lockutils [req-7c0152fa-b02a-425b-8d66-91db38255f37 req-edd8cc29-eea5-4738-a011-50718f5ca06d service nova] Acquiring lock "fe3e2b2a-9583-482e-b69b-6c130801d7db-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 792.917442] env[63345]: DEBUG oslo_concurrency.lockutils [req-7c0152fa-b02a-425b-8d66-91db38255f37 req-edd8cc29-eea5-4738-a011-50718f5ca06d service nova] Lock "fe3e2b2a-9583-482e-b69b-6c130801d7db-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 792.917617] env[63345]: DEBUG oslo_concurrency.lockutils [req-7c0152fa-b02a-425b-8d66-91db38255f37 req-edd8cc29-eea5-4738-a011-50718f5ca06d service nova] Lock "fe3e2b2a-9583-482e-b69b-6c130801d7db-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 792.917789] env[63345]: DEBUG nova.compute.manager [req-7c0152fa-b02a-425b-8d66-91db38255f37 req-edd8cc29-eea5-4738-a011-50718f5ca06d service nova] [instance: fe3e2b2a-9583-482e-b69b-6c130801d7db] No waiting events found dispatching network-vif-plugged-5c61daf5-e16c-4171-8fbf-a8d0108d4a21 {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 792.917960] env[63345]: WARNING nova.compute.manager [req-7c0152fa-b02a-425b-8d66-91db38255f37 req-edd8cc29-eea5-4738-a011-50718f5ca06d service nova] [instance: fe3e2b2a-9583-482e-b69b-6c130801d7db] Received unexpected event network-vif-plugged-5c61daf5-e16c-4171-8fbf-a8d0108d4a21 for instance with vm_state building and task_state spawning. 
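The event records above show the dispatch path for external Neutron events: if no waiter was registered for network-vif-plugged before the event arrives, the manager logs "No waiting events found" and then the "Received unexpected event ..." warning. The sketch below captures only that prepare/dispatch shape; InstanceEventWaiter is a hypothetical class for illustration, not Nova's InstanceEvents implementation.

```python
# Illustrative sketch only: the prepare/dispatch shape behind the
# "No waiting events found" / "Received unexpected event" records above.
import threading


class InstanceEventWaiter:
    def __init__(self):
        self._waiters = {}  # (instance_uuid, event_name) -> threading.Event

    def prepare(self, instance_uuid, event_name):
        """Register interest in an external event before it can arrive."""
        ev = threading.Event()
        self._waiters[(instance_uuid, event_name)] = ev
        return ev

    def dispatch(self, instance_uuid, event_name, vm_state, task_state):
        """Wake a registered waiter, otherwise warn like the log above."""
        ev = self._waiters.pop((instance_uuid, event_name), None)
        if ev is None:
            print("WARNING: Received unexpected event %s for instance %s "
                  "with vm_state %s and task_state %s."
                  % (event_name, instance_uuid, vm_state, task_state))
            return False
        ev.set()
        return True


if __name__ == "__main__":
    waiter = InstanceEventWaiter()
    # No prepare() was issued, so this dispatch takes the warning path.
    waiter.dispatch("fe3e2b2a-9583-482e-b69b-6c130801d7db",
                    "network-vif-plugged-5c61daf5", "building", "spawning")
```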
[ 792.919168] env[63345]: DEBUG nova.compute.manager [req-7c0152fa-b02a-425b-8d66-91db38255f37 req-edd8cc29-eea5-4738-a011-50718f5ca06d service nova] [instance: fe3e2b2a-9583-482e-b69b-6c130801d7db] Received event network-changed-5c61daf5-e16c-4171-8fbf-a8d0108d4a21 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 792.919368] env[63345]: DEBUG nova.compute.manager [req-7c0152fa-b02a-425b-8d66-91db38255f37 req-edd8cc29-eea5-4738-a011-50718f5ca06d service nova] [instance: fe3e2b2a-9583-482e-b69b-6c130801d7db] Refreshing instance network info cache due to event network-changed-5c61daf5-e16c-4171-8fbf-a8d0108d4a21. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 792.919557] env[63345]: DEBUG oslo_concurrency.lockutils [req-7c0152fa-b02a-425b-8d66-91db38255f37 req-edd8cc29-eea5-4738-a011-50718f5ca06d service nova] Acquiring lock "refresh_cache-fe3e2b2a-9583-482e-b69b-6c130801d7db" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 792.960816] env[63345]: DEBUG nova.network.neutron [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: fe3e2b2a-9583-482e-b69b-6c130801d7db] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 793.032815] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 4f108dcc-c130-4c3f-840d-7a912150db3f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 793.036995] env[63345]: DEBUG nova.compute.manager [None req-1125b816-eb22-46d7-bce9-e2884c5691f2 tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] [instance: a9b69d13-6330-4f9b-b8e1-1c0017655f9f] Start destroying the instance on the hypervisor. 
{{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 793.038154] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-1125b816-eb22-46d7-bce9-e2884c5691f2 tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] [instance: a9b69d13-6330-4f9b-b8e1-1c0017655f9f] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 793.038154] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3703b3a9-03a2-4cf0-8e7d-1f667a18165a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.050992] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-1125b816-eb22-46d7-bce9-e2884c5691f2 tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] [instance: a9b69d13-6330-4f9b-b8e1-1c0017655f9f] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 793.051273] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5b45bfbd-cf61-46f2-b38e-af143397fad4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.061441] env[63345]: DEBUG oslo_vmware.api [None req-1125b816-eb22-46d7-bce9-e2884c5691f2 tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Waiting for the task: (returnval){ [ 793.061441] env[63345]: value = "task-1017090" [ 793.061441] env[63345]: _type = "Task" [ 793.061441] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.075831] env[63345]: DEBUG oslo_vmware.api [None req-1125b816-eb22-46d7-bce9-e2884c5691f2 tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Task: {'id': task-1017090, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.231046] env[63345]: DEBUG nova.network.neutron [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: fe3e2b2a-9583-482e-b69b-6c130801d7db] Updating instance_info_cache with network_info: [{"id": "5c61daf5-e16c-4171-8fbf-a8d0108d4a21", "address": "fa:16:3e:5b:b7:f5", "network": {"id": "441f27c7-de99-494b-9db5-8e67e3c8e7b6", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-592603355-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8f0343855b6147f38b0cb3f2c72330e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5c61daf5-e1", "ovs_interfaceid": "5c61daf5-e16c-4171-8fbf-a8d0108d4a21", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 793.303593] env[63345]: INFO nova.compute.manager [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] [instance: cb712d80-be78-4c19-a891-329011521f30] Took 43.12 seconds to build instance. [ 793.539311] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 11652422-9136-4453-b932-06695f9bc910 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 793.574019] env[63345]: DEBUG nova.network.neutron [None req-47dfeaf8-0b50-4ebd-bbca-68aee8d31603 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Updating instance_info_cache with network_info: [{"id": "c6c991f3-51b9-4502-af97-3ca846db3c73", "address": "fa:16:3e:87:40:da", "network": {"id": "e38fba0e-9c96-4a09-b0f0-08546e52eba6", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-944344960-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "307c1bea8f6d47ddb4d5ebac8bba25ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "721e64ee-fc02-4eb5-9c8c-ea55647a1b92", "external-id": "nsx-vlan-transportzone-621", "segmentation_id": 621, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc6c991f3-51", "ovs_interfaceid": "c6c991f3-51b9-4502-af97-3ca846db3c73", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 793.577792] env[63345]: DEBUG oslo_vmware.api [None req-1125b816-eb22-46d7-bce9-e2884c5691f2 tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Task: {'id': task-1017090, 'name': PowerOffVM_Task, 'duration_secs': 0.254431} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.578453] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-1125b816-eb22-46d7-bce9-e2884c5691f2 tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] [instance: a9b69d13-6330-4f9b-b8e1-1c0017655f9f] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 793.578553] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-1125b816-eb22-46d7-bce9-e2884c5691f2 tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] [instance: a9b69d13-6330-4f9b-b8e1-1c0017655f9f] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 793.579111] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cbeecc9c-45b2-42ee-8079-15170e3a563c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.660136] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-1125b816-eb22-46d7-bce9-e2884c5691f2 tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] [instance: a9b69d13-6330-4f9b-b8e1-1c0017655f9f] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 793.660291] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-1125b816-eb22-46d7-bce9-e2884c5691f2 tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] [instance: a9b69d13-6330-4f9b-b8e1-1c0017655f9f] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 793.660473] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-1125b816-eb22-46d7-bce9-e2884c5691f2 tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Deleting the datastore file [datastore2] a9b69d13-6330-4f9b-b8e1-1c0017655f9f {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 793.660754] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-311c2a1c-f7a9-4281-adcd-16944d66c0ac {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.667965] env[63345]: DEBUG oslo_vmware.api [None req-1125b816-eb22-46d7-bce9-e2884c5691f2 tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Waiting for the task: (returnval){ [ 793.667965] env[63345]: value = "task-1017092" [ 793.667965] env[63345]: _type = "Task" [ 793.667965] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.676752] env[63345]: DEBUG oslo_vmware.api [None req-1125b816-eb22-46d7-bce9-e2884c5691f2 tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Task: {'id': task-1017092, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.734893] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Releasing lock "refresh_cache-fe3e2b2a-9583-482e-b69b-6c130801d7db" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 793.735328] env[63345]: DEBUG nova.compute.manager [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: fe3e2b2a-9583-482e-b69b-6c130801d7db] Instance network_info: |[{"id": "5c61daf5-e16c-4171-8fbf-a8d0108d4a21", "address": "fa:16:3e:5b:b7:f5", "network": {"id": "441f27c7-de99-494b-9db5-8e67e3c8e7b6", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-592603355-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8f0343855b6147f38b0cb3f2c72330e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5c61daf5-e1", "ovs_interfaceid": "5c61daf5-e16c-4171-8fbf-a8d0108d4a21", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 793.735720] env[63345]: DEBUG oslo_concurrency.lockutils [req-7c0152fa-b02a-425b-8d66-91db38255f37 req-edd8cc29-eea5-4738-a011-50718f5ca06d service nova] Acquired lock "refresh_cache-fe3e2b2a-9583-482e-b69b-6c130801d7db" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 793.735963] env[63345]: DEBUG nova.network.neutron [req-7c0152fa-b02a-425b-8d66-91db38255f37 req-edd8cc29-eea5-4738-a011-50718f5ca06d service nova] [instance: fe3e2b2a-9583-482e-b69b-6c130801d7db] Refreshing network info cache for port 5c61daf5-e16c-4171-8fbf-a8d0108d4a21 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 793.737398] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: fe3e2b2a-9583-482e-b69b-6c130801d7db] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5b:b7:f5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd5970ab5-34b8-4065-bfa6-f568b8f103b7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5c61daf5-e16c-4171-8fbf-a8d0108d4a21', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 793.751433] env[63345]: DEBUG oslo.service.loopingcall [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 
tempest-SecurityGroupsTestJSON-257927185-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 793.752485] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fe3e2b2a-9583-482e-b69b-6c130801d7db] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 793.756032] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f23c6f3d-64c8-4781-9923-fd46e3d71700 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.776796] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 793.776796] env[63345]: value = "task-1017093" [ 793.776796] env[63345]: _type = "Task" [ 793.776796] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.784400] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017093, 'name': CreateVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.806077] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5ed41e7e-bc6a-4833-9a56-ba3e0fb69140 tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Lock "cb712d80-be78-4c19-a891-329011521f30" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 53.429s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 794.046221] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 9aa651b8-317d-4153-8c33-9df0a5d16115 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 794.081379] env[63345]: DEBUG oslo_concurrency.lockutils [None req-47dfeaf8-0b50-4ebd-bbca-68aee8d31603 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Releasing lock "refresh_cache-00c58889-75f7-4a4b-a5a3-a45723c1f495" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 794.136018] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6293e3dc-2ff4-4de7-a0b7-8887400036b2 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquiring lock "interface-85fb1ecd-4ca3-401d-a87a-131f0b275506-27e88e33-527e-43d8-af2c-7bb4bdac51e2" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 794.136018] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6293e3dc-2ff4-4de7-a0b7-8887400036b2 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Lock "interface-85fb1ecd-4ca3-401d-a87a-131f0b275506-27e88e33-527e-43d8-af2c-7bb4bdac51e2" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 794.145863] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Acquiring lock "40d228ea-881e-4442-a16a-6758d061aa39" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 794.145863] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Lock "40d228ea-881e-4442-a16a-6758d061aa39" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 794.181112] env[63345]: DEBUG oslo_vmware.api [None req-1125b816-eb22-46d7-bce9-e2884c5691f2 tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Task: {'id': task-1017092, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.27894} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.181224] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-1125b816-eb22-46d7-bce9-e2884c5691f2 tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 794.181363] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-1125b816-eb22-46d7-bce9-e2884c5691f2 tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] [instance: a9b69d13-6330-4f9b-b8e1-1c0017655f9f] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 794.181550] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-1125b816-eb22-46d7-bce9-e2884c5691f2 tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] [instance: a9b69d13-6330-4f9b-b8e1-1c0017655f9f] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 794.181743] env[63345]: INFO nova.compute.manager [None req-1125b816-eb22-46d7-bce9-e2884c5691f2 tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] [instance: a9b69d13-6330-4f9b-b8e1-1c0017655f9f] Took 1.14 seconds to destroy the instance on the hypervisor. [ 794.182088] env[63345]: DEBUG oslo.service.loopingcall [None req-1125b816-eb22-46d7-bce9-e2884c5691f2 tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 794.182280] env[63345]: DEBUG nova.compute.manager [-] [instance: a9b69d13-6330-4f9b-b8e1-1c0017655f9f] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 794.182397] env[63345]: DEBUG nova.network.neutron [-] [instance: a9b69d13-6330-4f9b-b8e1-1c0017655f9f] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 794.290793] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017093, 'name': CreateVM_Task, 'duration_secs': 0.346225} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.291166] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fe3e2b2a-9583-482e-b69b-6c130801d7db] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 794.291778] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 794.291951] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 794.292476] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 794.292748] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-19166711-f2bd-4ad7-8f7f-bfe84e1b48f0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.300682] env[63345]: DEBUG oslo_vmware.api [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Waiting for the task: (returnval){ [ 794.300682] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52e22813-ed5f-7446-d00a-5abe1ce1da48" [ 794.300682] env[63345]: _type = "Task" [ 794.300682] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.312743] env[63345]: DEBUG oslo_vmware.api [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52e22813-ed5f-7446-d00a-5abe1ce1da48, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.548794] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance a85688b0-d68f-4370-bd95-dc9fb1d2c26a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 794.640274] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6293e3dc-2ff4-4de7-a0b7-8887400036b2 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquiring lock "85fb1ecd-4ca3-401d-a87a-131f0b275506" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 794.640545] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6293e3dc-2ff4-4de7-a0b7-8887400036b2 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquired lock "85fb1ecd-4ca3-401d-a87a-131f0b275506" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 794.642144] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-475712d2-7d52-48b5-bb7a-65b887f76616 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.655590] env[63345]: DEBUG nova.compute.manager [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] [instance: 40d228ea-881e-4442-a16a-6758d061aa39] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 794.680090] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da782202-a30b-4ee3-8bbf-ee755e918b79 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.713429] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-6293e3dc-2ff4-4de7-a0b7-8887400036b2 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] Reconfiguring VM to detach interface {{(pid=63345) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1974}} [ 794.716435] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ca39ed91-665f-486c-9999-41bd1ce0b86c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.738471] env[63345]: DEBUG oslo_vmware.api [None req-6293e3dc-2ff4-4de7-a0b7-8887400036b2 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Waiting for the task: (returnval){ [ 794.738471] env[63345]: value = "task-1017094" [ 794.738471] env[63345]: _type = "Task" [ 794.738471] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.747575] env[63345]: DEBUG oslo_vmware.api [None req-6293e3dc-2ff4-4de7-a0b7-8887400036b2 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017094, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.810489] env[63345]: DEBUG oslo_vmware.api [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52e22813-ed5f-7446-d00a-5abe1ce1da48, 'name': SearchDatastore_Task, 'duration_secs': 0.010828} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.811218] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 794.811218] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: fe3e2b2a-9583-482e-b69b-6c130801d7db] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 794.811218] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 794.811355] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 794.811538] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 794.811815] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d93bea2b-aee6-416d-b1e8-85f17f86725f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.823464] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 794.823675] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 794.824425] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4fc7b283-588d-4224-9629-0feba108ba6e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.831170] env[63345]: DEBUG oslo_vmware.api [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Waiting for the task: (returnval){ [ 794.831170] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52dd5261-9989-17be-13cb-7185e9d0db5c" [ 794.831170] env[63345]: _type = "Task" [ 794.831170] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.842422] env[63345]: DEBUG oslo_vmware.api [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52dd5261-9989-17be-13cb-7185e9d0db5c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.902810] env[63345]: DEBUG oslo_concurrency.lockutils [None req-32e656c0-12e2-4aef-862e-6e9b19a4ed01 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Acquiring lock "02eb493e-d1a1-4461-8e3f-e493e96fe058" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 794.903164] env[63345]: DEBUG oslo_concurrency.lockutils [None req-32e656c0-12e2-4aef-862e-6e9b19a4ed01 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Lock "02eb493e-d1a1-4461-8e3f-e493e96fe058" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 794.903252] env[63345]: DEBUG oslo_concurrency.lockutils [None req-32e656c0-12e2-4aef-862e-6e9b19a4ed01 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Acquiring lock "02eb493e-d1a1-4461-8e3f-e493e96fe058-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 794.903416] env[63345]: DEBUG oslo_concurrency.lockutils [None req-32e656c0-12e2-4aef-862e-6e9b19a4ed01 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Lock "02eb493e-d1a1-4461-8e3f-e493e96fe058-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 794.903571] env[63345]: DEBUG oslo_concurrency.lockutils [None req-32e656c0-12e2-4aef-862e-6e9b19a4ed01 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Lock "02eb493e-d1a1-4461-8e3f-e493e96fe058-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 794.910323] env[63345]: INFO nova.compute.manager [None req-32e656c0-12e2-4aef-862e-6e9b19a4ed01 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] [instance: 02eb493e-d1a1-4461-8e3f-e493e96fe058] Terminating instance [ 794.980274] env[63345]: DEBUG nova.network.neutron [req-7c0152fa-b02a-425b-8d66-91db38255f37 req-edd8cc29-eea5-4738-a011-50718f5ca06d service nova] [instance: fe3e2b2a-9583-482e-b69b-6c130801d7db] Updated VIF entry in instance network info cache for port 5c61daf5-e16c-4171-8fbf-a8d0108d4a21. {{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 794.980852] env[63345]: DEBUG nova.network.neutron [req-7c0152fa-b02a-425b-8d66-91db38255f37 req-edd8cc29-eea5-4738-a011-50718f5ca06d service nova] [instance: fe3e2b2a-9583-482e-b69b-6c130801d7db] Updating instance_info_cache with network_info: [{"id": "5c61daf5-e16c-4171-8fbf-a8d0108d4a21", "address": "fa:16:3e:5b:b7:f5", "network": {"id": "441f27c7-de99-494b-9db5-8e67e3c8e7b6", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-592603355-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8f0343855b6147f38b0cb3f2c72330e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5c61daf5-e1", "ovs_interfaceid": "5c61daf5-e16c-4171-8fbf-a8d0108d4a21", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 795.040138] env[63345]: DEBUG nova.compute.manager [req-f5df24d6-451f-46b0-8eab-63100e5d15b6 req-59c240b2-f813-47d8-a0d9-72a2d78585e3 service nova] [instance: a9b69d13-6330-4f9b-b8e1-1c0017655f9f] Received event network-vif-deleted-57bdfdfd-6642-43d9-a9b0-52fc8ac0b050 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 795.040448] env[63345]: INFO nova.compute.manager [req-f5df24d6-451f-46b0-8eab-63100e5d15b6 req-59c240b2-f813-47d8-a0d9-72a2d78585e3 service nova] [instance: a9b69d13-6330-4f9b-b8e1-1c0017655f9f] Neutron deleted interface 57bdfdfd-6642-43d9-a9b0-52fc8ac0b050; detaching it from the instance and deleting it from the info cache [ 795.040656] env[63345]: DEBUG nova.network.neutron [req-f5df24d6-451f-46b0-8eab-63100e5d15b6 req-59c240b2-f813-47d8-a0d9-72a2d78585e3 service nova] [instance: a9b69d13-6330-4f9b-b8e1-1c0017655f9f] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 795.051755] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance df2f06af-54a6-4dbd-83ff-1e4b066acbf3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but 
the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 795.136311] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-47dfeaf8-0b50-4ebd-bbca-68aee8d31603 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 795.136643] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6812b5d3-f5f4-4fe2-8512-5bc44401e72a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.148519] env[63345]: DEBUG oslo_vmware.api [None req-47dfeaf8-0b50-4ebd-bbca-68aee8d31603 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Waiting for the task: (returnval){ [ 795.148519] env[63345]: value = "task-1017095" [ 795.148519] env[63345]: _type = "Task" [ 795.148519] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.159605] env[63345]: DEBUG oslo_vmware.api [None req-47dfeaf8-0b50-4ebd-bbca-68aee8d31603 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Task: {'id': task-1017095, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.179697] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 795.250663] env[63345]: DEBUG oslo_vmware.api [None req-6293e3dc-2ff4-4de7-a0b7-8887400036b2 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017094, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.322520] env[63345]: DEBUG nova.compute.manager [req-9beca594-6ed4-458d-8a05-d932ceab6210 req-37481ffb-e2d4-4a7d-937d-0b34c9a697ec service nova] [instance: cb712d80-be78-4c19-a891-329011521f30] Received event network-changed-253ec25c-f8ef-41b2-a789-844509636178 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 795.322774] env[63345]: DEBUG nova.compute.manager [req-9beca594-6ed4-458d-8a05-d932ceab6210 req-37481ffb-e2d4-4a7d-937d-0b34c9a697ec service nova] [instance: cb712d80-be78-4c19-a891-329011521f30] Refreshing instance network info cache due to event network-changed-253ec25c-f8ef-41b2-a789-844509636178. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 795.322933] env[63345]: DEBUG oslo_concurrency.lockutils [req-9beca594-6ed4-458d-8a05-d932ceab6210 req-37481ffb-e2d4-4a7d-937d-0b34c9a697ec service nova] Acquiring lock "refresh_cache-cb712d80-be78-4c19-a891-329011521f30" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 795.324833] env[63345]: DEBUG oslo_concurrency.lockutils [req-9beca594-6ed4-458d-8a05-d932ceab6210 req-37481ffb-e2d4-4a7d-937d-0b34c9a697ec service nova] Acquired lock "refresh_cache-cb712d80-be78-4c19-a891-329011521f30" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 795.325345] env[63345]: DEBUG nova.network.neutron [req-9beca594-6ed4-458d-8a05-d932ceab6210 req-37481ffb-e2d4-4a7d-937d-0b34c9a697ec service nova] [instance: cb712d80-be78-4c19-a891-329011521f30] Refreshing network info cache for port 253ec25c-f8ef-41b2-a789-844509636178 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 795.350831] env[63345]: DEBUG oslo_vmware.api [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52dd5261-9989-17be-13cb-7185e9d0db5c, 'name': SearchDatastore_Task, 'duration_secs': 0.00882} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.351749] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2d179edd-c08b-480a-8f13-9f215db37908 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.358605] env[63345]: DEBUG oslo_vmware.api [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Waiting for the task: (returnval){ [ 795.358605] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]5218b6dc-e332-2cd7-457e-70e742d4fc5d" [ 795.358605] env[63345]: _type = "Task" [ 795.358605] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.371876] env[63345]: DEBUG oslo_vmware.api [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5218b6dc-e332-2cd7-457e-70e742d4fc5d, 'name': SearchDatastore_Task, 'duration_secs': 0.009369} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.372291] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 795.373240] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore1] fe3e2b2a-9583-482e-b69b-6c130801d7db/fe3e2b2a-9583-482e-b69b-6c130801d7db.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 795.373240] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7c3e9065-8964-4a97-b131-0e6213aba2b8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.380511] env[63345]: DEBUG oslo_vmware.api [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Waiting for the task: (returnval){ [ 795.380511] env[63345]: value = "task-1017096" [ 795.380511] env[63345]: _type = "Task" [ 795.380511] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.389343] env[63345]: DEBUG oslo_vmware.api [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Task: {'id': task-1017096, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.414237] env[63345]: DEBUG nova.compute.manager [None req-32e656c0-12e2-4aef-862e-6e9b19a4ed01 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] [instance: 02eb493e-d1a1-4461-8e3f-e493e96fe058] Start destroying the instance on the hypervisor. 
{{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 795.416072] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-32e656c0-12e2-4aef-862e-6e9b19a4ed01 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] [instance: 02eb493e-d1a1-4461-8e3f-e493e96fe058] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 795.417088] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a0c8682-3bb2-4cc1-a8ee-e751448bb0b3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.427018] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-32e656c0-12e2-4aef-862e-6e9b19a4ed01 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] [instance: 02eb493e-d1a1-4461-8e3f-e493e96fe058] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 795.427980] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a0b7eef7-59da-4f38-afe5-dcdfbad39d29 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.434262] env[63345]: DEBUG oslo_vmware.api [None req-32e656c0-12e2-4aef-862e-6e9b19a4ed01 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Waiting for the task: (returnval){ [ 795.434262] env[63345]: value = "task-1017097" [ 795.434262] env[63345]: _type = "Task" [ 795.434262] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.443008] env[63345]: DEBUG oslo_vmware.api [None req-32e656c0-12e2-4aef-862e-6e9b19a4ed01 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Task: {'id': task-1017097, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.483422] env[63345]: DEBUG oslo_concurrency.lockutils [req-7c0152fa-b02a-425b-8d66-91db38255f37 req-edd8cc29-eea5-4738-a011-50718f5ca06d service nova] Releasing lock "refresh_cache-fe3e2b2a-9583-482e-b69b-6c130801d7db" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 795.513194] env[63345]: DEBUG nova.network.neutron [-] [instance: a9b69d13-6330-4f9b-b8e1-1c0017655f9f] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 795.542961] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-55780d82-e1c9-4135-a3ae-b4f603665414 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.552944] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-622ec7e2-987d-4c01-86ed-8c54e01164ce {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.564053] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 37f269fe-0266-4c03-9641-e6f43072657a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 795.590407] env[63345]: DEBUG nova.compute.manager [req-f5df24d6-451f-46b0-8eab-63100e5d15b6 req-59c240b2-f813-47d8-a0d9-72a2d78585e3 service nova] [instance: a9b69d13-6330-4f9b-b8e1-1c0017655f9f] Detach interface failed, port_id=57bdfdfd-6642-43d9-a9b0-52fc8ac0b050, reason: Instance a9b69d13-6330-4f9b-b8e1-1c0017655f9f could not be found. {{(pid=63345) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 795.660942] env[63345]: DEBUG oslo_vmware.api [None req-47dfeaf8-0b50-4ebd-bbca-68aee8d31603 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Task: {'id': task-1017095, 'name': PowerOffVM_Task, 'duration_secs': 0.287705} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.661290] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-47dfeaf8-0b50-4ebd-bbca-68aee8d31603 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 795.664854] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46970484-a394-472e-91b4-ff400aec9062 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.683638] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d71b62fb-2ff6-4010-b1e8-bc11f3dd0dbe {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.729782] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-47dfeaf8-0b50-4ebd-bbca-68aee8d31603 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 795.730079] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-65157825-80a3-4618-bef1-3247116b5210 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.739684] env[63345]: DEBUG oslo_vmware.api [None req-47dfeaf8-0b50-4ebd-bbca-68aee8d31603 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Waiting for the task: (returnval){ [ 795.739684] env[63345]: value = "task-1017098" [ 795.739684] env[63345]: _type = "Task" [ 795.739684] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.754020] env[63345]: DEBUG oslo_vmware.api [None req-6293e3dc-2ff4-4de7-a0b7-8887400036b2 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017094, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.757314] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-47dfeaf8-0b50-4ebd-bbca-68aee8d31603 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] VM already powered off {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 795.757556] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-47dfeaf8-0b50-4ebd-bbca-68aee8d31603 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 795.757818] env[63345]: DEBUG oslo_concurrency.lockutils [None req-47dfeaf8-0b50-4ebd-bbca-68aee8d31603 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 795.757982] env[63345]: DEBUG oslo_concurrency.lockutils [None req-47dfeaf8-0b50-4ebd-bbca-68aee8d31603 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 795.758276] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-47dfeaf8-0b50-4ebd-bbca-68aee8d31603 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 795.758671] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b8c34cff-e1c3-497d-a9a2-6b83253d64bf {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.774027] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-47dfeaf8-0b50-4ebd-bbca-68aee8d31603 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 795.774027] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-47dfeaf8-0b50-4ebd-bbca-68aee8d31603 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 795.774027] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-97aa8c10-6b88-45f7-8eb5-6ce69a750f87 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.780265] env[63345]: DEBUG oslo_vmware.api [None req-47dfeaf8-0b50-4ebd-bbca-68aee8d31603 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Waiting for the task: (returnval){ [ 795.780265] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52114bb4-38ad-5cf9-d880-f08fb098a3fa" [ 795.780265] env[63345]: _type = "Task" [ 795.780265] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.793171] env[63345]: DEBUG oslo_vmware.api [None req-47dfeaf8-0b50-4ebd-bbca-68aee8d31603 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52114bb4-38ad-5cf9-d880-f08fb098a3fa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.896191] env[63345]: DEBUG oslo_vmware.api [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Task: {'id': task-1017096, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.945571] env[63345]: DEBUG oslo_vmware.api [None req-32e656c0-12e2-4aef-862e-6e9b19a4ed01 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Task: {'id': task-1017097, 'name': PowerOffVM_Task, 'duration_secs': 0.26694} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.945872] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-32e656c0-12e2-4aef-862e-6e9b19a4ed01 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] [instance: 02eb493e-d1a1-4461-8e3f-e493e96fe058] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 795.945998] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-32e656c0-12e2-4aef-862e-6e9b19a4ed01 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] [instance: 02eb493e-d1a1-4461-8e3f-e493e96fe058] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 795.946277] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9bf70618-0d9d-48f7-84cb-c1ae39af4ed6 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.016278] env[63345]: INFO nova.compute.manager [-] [instance: a9b69d13-6330-4f9b-b8e1-1c0017655f9f] Took 1.83 seconds to deallocate network for instance. 
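Editor's note: the "Acquiring lock ... by ...", "Lock ... acquired ... waited N.NNNs" and "Releasing lock ..." records in this stretch (for example around the "[datastore1] devstack-image-cache_base/..." image-cache entries and the "compute_resources" claim) are emitted by oslo.concurrency's lockutils wrappers, which the records cite directly (lockutils.py:310-331 and 402-421). The following is a minimal sketch of the two usage forms that produce such records; the lock names and functions are placeholders for illustration, not Nova's actual code.

    # Minimal sketch (placeholder names, not Nova's code): the two lockutils
    # patterns that emit the "Acquiring lock" / "acquired" / "released" records.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources', external=False)
    def claim_resources():
        # Critical section: one caller per lock name at a time; the decorator's
        # wrapper logs how long the caller waited and how long the lock was held.
        pass

    def refresh_cache(instance_uuid):
        # Context-manager form, as used for the "refresh_cache-<uuid>" locks.
        with lockutils.lock('refresh_cache-%s' % instance_uuid, external=False):
            pass

    claim_resources()
    refresh_cache('00000000-0000-0000-0000-000000000000')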
[ 796.041239] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-32e656c0-12e2-4aef-862e-6e9b19a4ed01 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] [instance: 02eb493e-d1a1-4461-8e3f-e493e96fe058] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 796.041239] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-32e656c0-12e2-4aef-862e-6e9b19a4ed01 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] [instance: 02eb493e-d1a1-4461-8e3f-e493e96fe058] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 796.041239] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-32e656c0-12e2-4aef-862e-6e9b19a4ed01 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Deleting the datastore file [datastore2] 02eb493e-d1a1-4461-8e3f-e493e96fe058 {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 796.041239] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2afd0f39-0358-464e-b212-8a9b685c1627 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.050539] env[63345]: DEBUG oslo_vmware.api [None req-32e656c0-12e2-4aef-862e-6e9b19a4ed01 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Waiting for the task: (returnval){ [ 796.050539] env[63345]: value = "task-1017100" [ 796.050539] env[63345]: _type = "Task" [ 796.050539] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.059201] env[63345]: DEBUG oslo_vmware.api [None req-32e656c0-12e2-4aef-862e-6e9b19a4ed01 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Task: {'id': task-1017100, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.067653] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 796.067932] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Total usable vcpus: 48, total allocated vcpus: 15 {{(pid=63345) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 796.069159] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3392MB phys_disk=200GB used_disk=15GB total_vcpus=48 used_vcpus=15 pci_stats=[] {{(pid=63345) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 796.229344] env[63345]: DEBUG nova.network.neutron [req-9beca594-6ed4-458d-8a05-d932ceab6210 req-37481ffb-e2d4-4a7d-937d-0b34c9a697ec service nova] [instance: cb712d80-be78-4c19-a891-329011521f30] Updated VIF entry in instance network info cache for port 253ec25c-f8ef-41b2-a789-844509636178. {{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 796.229738] env[63345]: DEBUG nova.network.neutron [req-9beca594-6ed4-458d-8a05-d932ceab6210 req-37481ffb-e2d4-4a7d-937d-0b34c9a697ec service nova] [instance: cb712d80-be78-4c19-a891-329011521f30] Updating instance_info_cache with network_info: [{"id": "253ec25c-f8ef-41b2-a789-844509636178", "address": "fa:16:3e:74:ae:d2", "network": {"id": "e9d84e91-5123-45ff-bfe5-daed25b5cc76", "bridge": "br-int", "label": "tempest-ServersTestJSON-1455763601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.247", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5c17e4fcceb7439183e78786579ac4f0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "257e5ea7-8b80-4301-9900-a754f1fe2031", "external-id": "nsx-vlan-transportzone-682", "segmentation_id": 682, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap253ec25c-f8", "ovs_interfaceid": "253ec25c-f8ef-41b2-a789-844509636178", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 796.248958] env[63345]: DEBUG oslo_vmware.api [None req-6293e3dc-2ff4-4de7-a0b7-8887400036b2 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017094, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.292076] env[63345]: DEBUG oslo_vmware.api [None req-47dfeaf8-0b50-4ebd-bbca-68aee8d31603 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52114bb4-38ad-5cf9-d880-f08fb098a3fa, 'name': SearchDatastore_Task, 'duration_secs': 0.059497} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.292739] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d4736e90-525e-45b6-a38b-7b7c30c83f41 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.299553] env[63345]: DEBUG oslo_vmware.api [None req-47dfeaf8-0b50-4ebd-bbca-68aee8d31603 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Waiting for the task: (returnval){ [ 796.299553] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52f62978-710f-f12b-e299-916d08d94b3a" [ 796.299553] env[63345]: _type = "Task" [ 796.299553] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.307301] env[63345]: DEBUG oslo_vmware.api [None req-47dfeaf8-0b50-4ebd-bbca-68aee8d31603 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52f62978-710f-f12b-e299-916d08d94b3a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.394206] env[63345]: DEBUG oslo_vmware.api [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Task: {'id': task-1017096, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.543246} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.394946] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore1] fe3e2b2a-9583-482e-b69b-6c130801d7db/fe3e2b2a-9583-482e-b69b-6c130801d7db.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 796.394946] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: fe3e2b2a-9583-482e-b69b-6c130801d7db] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 796.395215] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4f292066-a165-4e8c-844e-5094d2ef2ecc {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.409561] env[63345]: DEBUG oslo_vmware.api [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Waiting for the task: (returnval){ [ 796.409561] env[63345]: value = "task-1017101" [ 796.409561] env[63345]: _type = "Task" [ 796.409561] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.419166] env[63345]: DEBUG oslo_vmware.api [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Task: {'id': task-1017101, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.524525] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1125b816-eb22-46d7-bce9-e2884c5691f2 tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 796.538978] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f9cab39-f2cb-4eaa-aae0-260004fdef4d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.545835] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1205745-fcc6-45ec-bb10-c049d59722c8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.585794] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12479410-8642-4af8-b104-dd603f8b481f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.591448] env[63345]: DEBUG oslo_vmware.api [None req-32e656c0-12e2-4aef-862e-6e9b19a4ed01 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Task: {'id': task-1017100, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.153655} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.592081] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-32e656c0-12e2-4aef-862e-6e9b19a4ed01 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 796.592283] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-32e656c0-12e2-4aef-862e-6e9b19a4ed01 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] [instance: 02eb493e-d1a1-4461-8e3f-e493e96fe058] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 796.592469] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-32e656c0-12e2-4aef-862e-6e9b19a4ed01 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] [instance: 02eb493e-d1a1-4461-8e3f-e493e96fe058] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 796.592649] env[63345]: INFO nova.compute.manager [None req-32e656c0-12e2-4aef-862e-6e9b19a4ed01 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] [instance: 02eb493e-d1a1-4461-8e3f-e493e96fe058] Took 1.18 seconds to destroy the instance on the hypervisor. 
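Editor's note: the paired "Waiting for the task: (returnval){ value = "task-..." }" and "Task: {'id': ..., 'name': ...Task} progress is N%" records come from oslo.vmware's wait_for_task, which polls a vSphere task reference until it succeeds or raises (the records cite api.py:397 for wait_for_task and api.py:434/444 for _poll_task). A rough sketch of how a caller drives this is below; the vCenter host, credentials, datastore path and the FileManager call are placeholders chosen for illustration, not values taken from this deployment.

    # Rough sketch with placeholder credentials and paths; not Nova's code.
    from oslo_vmware import api, vim_util

    session = api.VMwareAPISession(
        'vcenter.example.test',            # placeholder vCenter host
        'administrator@vsphere.local',     # placeholder user
        'secret',                          # placeholder password
        api_retry_count=10,
        task_poll_interval=0.5)            # seconds between _poll_task rounds

    # Look up a Datacenter managed-object reference (first one found).
    retrieve_result = session.invoke_api(
        vim_util, 'get_objects', session.vim, 'Datacenter', 100)
    dc_ref = retrieve_result.objects[0].obj

    # Kick off an asynchronous vSphere operation; *_Task calls return a task ref.
    task_ref = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task',
        session.vim.service_content.fileManager,
        name='[datastore2] some-instance-uuid',   # hypothetical datastore path
        datacenter=dc_ref)

    # wait_for_task logs "Waiting for the task", polls the task object
    # (the "progress is N%" records) and returns the final task info.
    task_info = session.wait_for_task(task_ref)
    print(task_info.state)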
[ 796.592892] env[63345]: DEBUG oslo.service.loopingcall [None req-32e656c0-12e2-4aef-862e-6e9b19a4ed01 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 796.593116] env[63345]: DEBUG nova.compute.manager [-] [instance: 02eb493e-d1a1-4461-8e3f-e493e96fe058] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 796.593220] env[63345]: DEBUG nova.network.neutron [-] [instance: 02eb493e-d1a1-4461-8e3f-e493e96fe058] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 796.598262] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bcc5ccf-6ed1-48a8-b569-4d8b57b51336 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.612633] env[63345]: DEBUG nova.compute.provider_tree [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 796.734328] env[63345]: DEBUG oslo_concurrency.lockutils [req-9beca594-6ed4-458d-8a05-d932ceab6210 req-37481ffb-e2d4-4a7d-937d-0b34c9a697ec service nova] Releasing lock "refresh_cache-cb712d80-be78-4c19-a891-329011521f30" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 796.750905] env[63345]: DEBUG oslo_vmware.api [None req-6293e3dc-2ff4-4de7-a0b7-8887400036b2 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017094, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.809405] env[63345]: DEBUG oslo_vmware.api [None req-47dfeaf8-0b50-4ebd-bbca-68aee8d31603 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52f62978-710f-f12b-e299-916d08d94b3a, 'name': SearchDatastore_Task, 'duration_secs': 0.010828} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.809861] env[63345]: DEBUG oslo_concurrency.lockutils [None req-47dfeaf8-0b50-4ebd-bbca-68aee8d31603 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 796.810298] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-47dfeaf8-0b50-4ebd-bbca-68aee8d31603 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore1] 00c58889-75f7-4a4b-a5a3-a45723c1f495/2ff49e1b-8f44-4332-bba9-777d55ff62c4-rescue.vmdk. 
{{(pid=63345) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 796.810745] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-906fd8f1-f181-4a58-b382-a605d3d3f996 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.821024] env[63345]: DEBUG oslo_vmware.api [None req-47dfeaf8-0b50-4ebd-bbca-68aee8d31603 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Waiting for the task: (returnval){ [ 796.821024] env[63345]: value = "task-1017102" [ 796.821024] env[63345]: _type = "Task" [ 796.821024] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.829528] env[63345]: DEBUG oslo_vmware.api [None req-47dfeaf8-0b50-4ebd-bbca-68aee8d31603 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Task: {'id': task-1017102, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.919677] env[63345]: DEBUG oslo_vmware.api [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Task: {'id': task-1017101, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078797} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.920189] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: fe3e2b2a-9583-482e-b69b-6c130801d7db] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 796.921297] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca67242b-9d0a-459f-827f-c7de9cbeb649 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.946058] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: fe3e2b2a-9583-482e-b69b-6c130801d7db] Reconfiguring VM instance instance-0000003f to attach disk [datastore1] fe3e2b2a-9583-482e-b69b-6c130801d7db/fe3e2b2a-9583-482e-b69b-6c130801d7db.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 796.946058] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a13423b2-dabd-479a-9569-059ee35da698 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.966223] env[63345]: DEBUG oslo_vmware.api [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Waiting for the task: (returnval){ [ 796.966223] env[63345]: value = "task-1017103" [ 796.966223] env[63345]: _type = "Task" [ 796.966223] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.975872] env[63345]: DEBUG oslo_vmware.api [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Task: {'id': task-1017103, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.118740] env[63345]: DEBUG nova.scheduler.client.report [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 797.169199] env[63345]: DEBUG nova.compute.manager [req-6a446b60-c0c6-4861-ad39-907071274655 req-90cc6183-a34c-4a93-b8ce-8662243687c6 service nova] [instance: 02eb493e-d1a1-4461-8e3f-e493e96fe058] Received event network-vif-deleted-9674730a-bad7-4d14-8348-d1d12f0c1c89 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 797.169425] env[63345]: INFO nova.compute.manager [req-6a446b60-c0c6-4861-ad39-907071274655 req-90cc6183-a34c-4a93-b8ce-8662243687c6 service nova] [instance: 02eb493e-d1a1-4461-8e3f-e493e96fe058] Neutron deleted interface 9674730a-bad7-4d14-8348-d1d12f0c1c89; detaching it from the instance and deleting it from the info cache [ 797.169630] env[63345]: DEBUG nova.network.neutron [req-6a446b60-c0c6-4861-ad39-907071274655 req-90cc6183-a34c-4a93-b8ce-8662243687c6 service nova] [instance: 02eb493e-d1a1-4461-8e3f-e493e96fe058] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 797.255719] env[63345]: DEBUG oslo_vmware.api [None req-6293e3dc-2ff4-4de7-a0b7-8887400036b2 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017094, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.331230] env[63345]: DEBUG oslo_vmware.api [None req-47dfeaf8-0b50-4ebd-bbca-68aee8d31603 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Task: {'id': task-1017102, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.479021] env[63345]: DEBUG oslo_vmware.api [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Task: {'id': task-1017103, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.513382] env[63345]: DEBUG nova.network.neutron [-] [instance: 02eb493e-d1a1-4461-8e3f-e493e96fe058] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 797.622535] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63345) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 797.622873] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 6.145s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 797.622873] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 31.086s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 797.623527] env[63345]: DEBUG nova.objects.instance [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 778faa4f-4c5f-4ec2-b17b-5d7513c9c218] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63345) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 797.632235] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 797.632235] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Cleaning up deleted instances {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11557}} [ 797.674036] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-819e32a0-10bd-43f6-8d18-b713b1333e23 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.690160] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba458262-7bc3-49f5-928b-c66f248f119b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.724272] env[63345]: DEBUG nova.compute.manager [req-6a446b60-c0c6-4861-ad39-907071274655 req-90cc6183-a34c-4a93-b8ce-8662243687c6 service nova] [instance: 02eb493e-d1a1-4461-8e3f-e493e96fe058] Detach interface failed, port_id=9674730a-bad7-4d14-8348-d1d12f0c1c89, reason: Instance 02eb493e-d1a1-4461-8e3f-e493e96fe058 could not be found. 
{{(pid=63345) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 797.750323] env[63345]: DEBUG oslo_vmware.api [None req-6293e3dc-2ff4-4de7-a0b7-8887400036b2 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017094, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.833491] env[63345]: DEBUG oslo_vmware.api [None req-47dfeaf8-0b50-4ebd-bbca-68aee8d31603 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Task: {'id': task-1017102, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.537555} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.833815] env[63345]: INFO nova.virt.vmwareapi.ds_util [None req-47dfeaf8-0b50-4ebd-bbca-68aee8d31603 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore1] 00c58889-75f7-4a4b-a5a3-a45723c1f495/2ff49e1b-8f44-4332-bba9-777d55ff62c4-rescue.vmdk. [ 797.834756] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b78fe79-9b8f-4a7a-95df-17ddacf8bb16 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.859014] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-47dfeaf8-0b50-4ebd-bbca-68aee8d31603 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Reconfiguring VM instance instance-0000003d to attach disk [datastore1] 00c58889-75f7-4a4b-a5a3-a45723c1f495/2ff49e1b-8f44-4332-bba9-777d55ff62c4-rescue.vmdk or device None with type thin {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 797.859285] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fc43603a-5f91-490a-836d-a1b4c2bfa68e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.878199] env[63345]: DEBUG oslo_vmware.api [None req-47dfeaf8-0b50-4ebd-bbca-68aee8d31603 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Waiting for the task: (returnval){ [ 797.878199] env[63345]: value = "task-1017104" [ 797.878199] env[63345]: _type = "Task" [ 797.878199] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.885736] env[63345]: DEBUG oslo_vmware.api [None req-47dfeaf8-0b50-4ebd-bbca-68aee8d31603 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Task: {'id': task-1017104, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.978286] env[63345]: DEBUG oslo_vmware.api [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Task: {'id': task-1017103, 'name': ReconfigVM_Task, 'duration_secs': 0.616248} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.978615] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: fe3e2b2a-9583-482e-b69b-6c130801d7db] Reconfigured VM instance instance-0000003f to attach disk [datastore1] fe3e2b2a-9583-482e-b69b-6c130801d7db/fe3e2b2a-9583-482e-b69b-6c130801d7db.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 797.979376] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c9b65c38-0d9d-4db4-8cbf-2491189b2f8b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.987021] env[63345]: DEBUG oslo_vmware.api [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Waiting for the task: (returnval){ [ 797.987021] env[63345]: value = "task-1017105" [ 797.987021] env[63345]: _type = "Task" [ 797.987021] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.995644] env[63345]: DEBUG oslo_vmware.api [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Task: {'id': task-1017105, 'name': Rename_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.016803] env[63345]: INFO nova.compute.manager [-] [instance: 02eb493e-d1a1-4461-8e3f-e493e96fe058] Took 1.42 seconds to deallocate network for instance. 
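The "Acquiring lock ...", "Lock ... acquired ... waited", and "Lock ... released ... held" triplets scattered through these entries (most visibly around "compute_resources") are emitted by oslo.concurrency's lockutils instrumentation whenever a decorated critical section runs. A minimal sketch of that pattern, with claim_resources() as a hypothetical stand-in for the resource-tracker methods named in the log:

from oslo_concurrency import lockutils

@lockutils.synchronized("compute_resources")
def claim_resources(instance_uuid):
    # Runs with the named lock held. lockutils logs how long the caller
    # waited to acquire the lock and how long the body held it, which is
    # where figures such as "waited 31.086s" and "held 6.145s" come from.
    pass

Nova reaches this through its own thin wrappers rather than calling lockutils directly, but the acquire/wait/held accounting seen in the log is the same.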
[ 798.142420] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] There are 20 instances to clean {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11566}} [ 798.143100] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: d3e99100-f13f-4019-9b5a-adaa65dacc5f] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 798.146459] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ed34006f-59f5-45d9-8b4e-5b2d3375bcdd tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 0.523s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 798.147212] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.639s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 798.148701] env[63345]: INFO nova.compute.claims [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 4f108dcc-c130-4c3f-840d-7a912150db3f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 798.252689] env[63345]: DEBUG oslo_vmware.api [None req-6293e3dc-2ff4-4de7-a0b7-8887400036b2 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017094, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.389628] env[63345]: DEBUG oslo_vmware.api [None req-47dfeaf8-0b50-4ebd-bbca-68aee8d31603 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Task: {'id': task-1017104, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.497760] env[63345]: DEBUG oslo_vmware.api [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Task: {'id': task-1017105, 'name': Rename_Task, 'duration_secs': 0.178242} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.498104] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: fe3e2b2a-9583-482e-b69b-6c130801d7db] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 798.498367] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fd66efa0-a570-48a8-a1bd-2a4da8fbc67a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.505373] env[63345]: DEBUG oslo_vmware.api [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Waiting for the task: (returnval){ [ 798.505373] env[63345]: value = "task-1017106" [ 798.505373] env[63345]: _type = "Task" [ 798.505373] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.513658] env[63345]: DEBUG oslo_vmware.api [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Task: {'id': task-1017106, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.522772] env[63345]: DEBUG oslo_concurrency.lockutils [None req-32e656c0-12e2-4aef-862e-6e9b19a4ed01 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 798.655645] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 64fcf837-1d9d-41b1-a2a1-3c16362932cf] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 798.753524] env[63345]: DEBUG oslo_vmware.api [None req-6293e3dc-2ff4-4de7-a0b7-8887400036b2 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017094, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.893494] env[63345]: DEBUG oslo_vmware.api [None req-47dfeaf8-0b50-4ebd-bbca-68aee8d31603 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Task: {'id': task-1017104, 'name': ReconfigVM_Task, 'duration_secs': 0.938901} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.893798] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-47dfeaf8-0b50-4ebd-bbca-68aee8d31603 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Reconfigured VM instance instance-0000003d to attach disk [datastore1] 00c58889-75f7-4a4b-a5a3-a45723c1f495/2ff49e1b-8f44-4332-bba9-777d55ff62c4-rescue.vmdk or device None with type thin {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 798.894946] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66d32fac-63b4-4f45-ba30-d94308d62eb9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.920128] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5c5fc3fe-66ea-4d88-b44c-a685f40ef024 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.938587] env[63345]: DEBUG oslo_vmware.api [None req-47dfeaf8-0b50-4ebd-bbca-68aee8d31603 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Waiting for the task: (returnval){ [ 798.938587] env[63345]: value = "task-1017107" [ 798.938587] env[63345]: _type = "Task" [ 798.938587] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.946888] env[63345]: DEBUG oslo_vmware.api [None req-47dfeaf8-0b50-4ebd-bbca-68aee8d31603 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Task: {'id': task-1017107, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.015797] env[63345]: DEBUG oslo_vmware.api [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Task: {'id': task-1017106, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.133435] env[63345]: DEBUG oslo_concurrency.lockutils [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Acquiring lock "dde93fd5-6312-4d91-b041-b7fc84b207d3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 799.133680] env[63345]: DEBUG oslo_concurrency.lockutils [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Lock "dde93fd5-6312-4d91-b041-b7fc84b207d3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 799.161898] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 3101726f-5b14-417e-bcf8-390ce1f9b467] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 799.256223] env[63345]: DEBUG oslo_vmware.api [None req-6293e3dc-2ff4-4de7-a0b7-8887400036b2 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017094, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.457542] env[63345]: DEBUG oslo_vmware.api [None req-47dfeaf8-0b50-4ebd-bbca-68aee8d31603 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Task: {'id': task-1017107, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.516629] env[63345]: DEBUG oslo_vmware.api [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Task: {'id': task-1017106, 'name': PowerOnVM_Task, 'duration_secs': 0.661204} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.519375] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: fe3e2b2a-9583-482e-b69b-6c130801d7db] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 799.520175] env[63345]: INFO nova.compute.manager [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: fe3e2b2a-9583-482e-b69b-6c130801d7db] Took 8.07 seconds to spawn the instance on the hypervisor. 
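Read in sequence, the CopyVirtualDisk_Task that completed at the top of this stretch (task-1017096), followed by task-1017101 (extend), task-1017103 (reconfig), task-1017105 (rename) and task-1017106 (power on), trace the VMware spawn path for instance fe3e2b2a-9583-482e-b69b-6c130801d7db: copy the cached image VMDK into the instance folder, extend the root disk to the flavor size, reconfigure the VM to attach the disk, rename the VM, and power it on. The condensed sketch below only illustrates that ordering; the session methods shown (copy_disk, extend_disk, reconfig_vm, rename_vm, power_on_vm) are hypothetical placeholders for the vm_util/volumeops helpers, and wait_for_task stands for the polling loop sketched earlier.

def spawn_from_image_cache(session, vm_ref, cached_vmdk, root_vmdk, root_gb):
    # 1. CopyVirtualDisk_Task: image-cache copy -> per-instance datastore folder
    session.wait_for_task(session.copy_disk(cached_vmdk, root_vmdk))
    # 2. ExtendVirtualDisk_Task: grow the root disk to the flavor size, in KB
    #    (a 1 GB root disk gives the "Extending root virtual disk to 1048576" line)
    session.wait_for_task(session.extend_disk(root_vmdk, root_gb * 1024 * 1024))
    # 3. ReconfigVM_Task: attach the extended disk to the VM
    session.wait_for_task(session.reconfig_vm(vm_ref, disk_path=root_vmdk))
    # 4. Rename_Task, then PowerOnVM_Task: bring the instance up
    session.wait_for_task(session.rename_vm(vm_ref))
    session.wait_for_task(session.power_on_vm(vm_ref))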
[ 799.520542] env[63345]: DEBUG nova.compute.manager [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: fe3e2b2a-9583-482e-b69b-6c130801d7db] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 799.521766] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78558b65-e084-43d5-9878-4525325b9150 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.591649] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb62981e-2865-44f8-9f24-16243560bd12 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.603069] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26cfab7b-6c96-42a4-a891-f8a195855cd9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.635552] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-818b2c49-0b95-4e4d-aa39-f3d1e691034f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.638342] env[63345]: DEBUG nova.compute.manager [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: dde93fd5-6312-4d91-b041-b7fc84b207d3] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 799.645636] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aea69ea-bc42-47dd-afd2-8aa20dda988b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.659446] env[63345]: DEBUG nova.compute.provider_tree [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 799.665331] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 34e0234c-36c4-4878-979b-46f045bd1785] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 799.754027] env[63345]: DEBUG oslo_vmware.api [None req-6293e3dc-2ff4-4de7-a0b7-8887400036b2 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017094, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.951819] env[63345]: DEBUG oslo_vmware.api [None req-47dfeaf8-0b50-4ebd-bbca-68aee8d31603 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Task: {'id': task-1017107, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.043065] env[63345]: INFO nova.compute.manager [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: fe3e2b2a-9583-482e-b69b-6c130801d7db] Took 43.02 seconds to build instance. [ 800.157292] env[63345]: DEBUG oslo_concurrency.lockutils [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 800.162466] env[63345]: DEBUG nova.scheduler.client.report [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 800.167965] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 78a259a1-73e1-4c0a-a9f6-6a8d7cda3b8c] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 800.254230] env[63345]: DEBUG oslo_vmware.api [None req-6293e3dc-2ff4-4de7-a0b7-8887400036b2 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017094, 'name': ReconfigVM_Task} progress is 18%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.453029] env[63345]: DEBUG oslo_vmware.api [None req-47dfeaf8-0b50-4ebd-bbca-68aee8d31603 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Task: {'id': task-1017107, 'name': ReconfigVM_Task, 'duration_secs': 1.174667} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.453029] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-47dfeaf8-0b50-4ebd-bbca-68aee8d31603 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 800.453336] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3c28ea96-324e-4e9d-9f9d-ae88e8d848ca {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.461271] env[63345]: DEBUG oslo_vmware.api [None req-47dfeaf8-0b50-4ebd-bbca-68aee8d31603 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Waiting for the task: (returnval){ [ 800.461271] env[63345]: value = "task-1017108" [ 800.461271] env[63345]: _type = "Task" [ 800.461271] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.469681] env[63345]: DEBUG oslo_vmware.api [None req-47dfeaf8-0b50-4ebd-bbca-68aee8d31603 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Task: {'id': task-1017108, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.544491] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a42054c4-a81c-46b5-b4d2-6000a70a20f1 tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Lock "fe3e2b2a-9583-482e-b69b-6c130801d7db" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 51.930s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 800.667775] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.519s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 800.667775] env[63345]: DEBUG nova.compute.manager [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 4f108dcc-c130-4c3f-840d-7a912150db3f] Start building networks asynchronously for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 800.669954] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6ec4ae4a-1862-4345-933b-1093499b3269 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.999s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 800.670208] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6ec4ae4a-1862-4345-933b-1093499b3269 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 800.672534] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.036s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 800.674808] env[63345]: INFO nova.compute.claims [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 11652422-9136-4453-b932-06695f9bc910] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 800.678156] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: abc81fa5-78a9-48b1-a49e-2faffddf2411] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 800.708433] env[63345]: INFO nova.scheduler.client.report [None req-6ec4ae4a-1862-4345-933b-1093499b3269 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Deleted allocations for instance e3d52cbd-e768-4425-b83e-180a6e58fd00 [ 800.754953] env[63345]: DEBUG oslo_vmware.api [None req-6293e3dc-2ff4-4de7-a0b7-8887400036b2 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017094, 'name': ReconfigVM_Task, 'duration_secs': 5.861962} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.756989] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6293e3dc-2ff4-4de7-a0b7-8887400036b2 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Releasing lock "85fb1ecd-4ca3-401d-a87a-131f0b275506" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 800.756989] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-6293e3dc-2ff4-4de7-a0b7-8887400036b2 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] Reconfigured VM to detach interface {{(pid=63345) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1984}} [ 800.970412] env[63345]: DEBUG oslo_vmware.api [None req-47dfeaf8-0b50-4ebd-bbca-68aee8d31603 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Task: {'id': task-1017108, 'name': PowerOnVM_Task, 'duration_secs': 0.463895} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.971944] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-47dfeaf8-0b50-4ebd-bbca-68aee8d31603 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 800.974696] env[63345]: DEBUG nova.compute.manager [req-2fcb550b-30a8-4fc0-9d9e-0d350c1c491d req-26a53197-d6b8-45ee-ae15-f09849e9da35 service nova] [instance: fe3e2b2a-9583-482e-b69b-6c130801d7db] Received event network-changed-5c61daf5-e16c-4171-8fbf-a8d0108d4a21 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 800.974920] env[63345]: DEBUG nova.compute.manager [req-2fcb550b-30a8-4fc0-9d9e-0d350c1c491d req-26a53197-d6b8-45ee-ae15-f09849e9da35 service nova] [instance: fe3e2b2a-9583-482e-b69b-6c130801d7db] Refreshing instance network info cache due to event network-changed-5c61daf5-e16c-4171-8fbf-a8d0108d4a21. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 800.975216] env[63345]: DEBUG oslo_concurrency.lockutils [req-2fcb550b-30a8-4fc0-9d9e-0d350c1c491d req-26a53197-d6b8-45ee-ae15-f09849e9da35 service nova] Acquiring lock "refresh_cache-fe3e2b2a-9583-482e-b69b-6c130801d7db" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 800.975399] env[63345]: DEBUG oslo_concurrency.lockutils [req-2fcb550b-30a8-4fc0-9d9e-0d350c1c491d req-26a53197-d6b8-45ee-ae15-f09849e9da35 service nova] Acquired lock "refresh_cache-fe3e2b2a-9583-482e-b69b-6c130801d7db" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 800.975595] env[63345]: DEBUG nova.network.neutron [req-2fcb550b-30a8-4fc0-9d9e-0d350c1c491d req-26a53197-d6b8-45ee-ae15-f09849e9da35 service nova] [instance: fe3e2b2a-9583-482e-b69b-6c130801d7db] Refreshing network info cache for port 5c61daf5-e16c-4171-8fbf-a8d0108d4a21 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 800.977332] env[63345]: DEBUG nova.compute.manager [None req-47dfeaf8-0b50-4ebd-bbca-68aee8d31603 tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 800.978382] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f0bed07-ea8f-4ea8-8bf9-819724486e3e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.069475] env[63345]: DEBUG nova.compute.manager [req-7b5f4daa-7810-48a5-9782-939b6e6a9a2b req-96b809d4-1bc4-4b7f-b5b3-643094089270 service nova] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] Received event network-vif-deleted-27e88e33-527e-43d8-af2c-7bb4bdac51e2 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 801.070383] env[63345]: INFO nova.compute.manager [req-7b5f4daa-7810-48a5-9782-939b6e6a9a2b req-96b809d4-1bc4-4b7f-b5b3-643094089270 service nova] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] Neutron deleted interface 27e88e33-527e-43d8-af2c-7bb4bdac51e2; detaching it from the instance and deleting it from the info cache [ 801.070383] env[63345]: DEBUG nova.network.neutron [req-7b5f4daa-7810-48a5-9782-939b6e6a9a2b req-96b809d4-1bc4-4b7f-b5b3-643094089270 service nova] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] Updating instance_info_cache with network_info: [{"id": "989bf403-079f-46b9-ab79-c645cec393aa", "address": "fa:16:3e:93:20:cd", "network": {"id": "b360ab0d-3deb-4632-a8d5-c1639db9e9e2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2015660260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.217", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33c28bfca4da460e8ca96dc7519204c8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"f35e69ef-c2c8-4b8c-9887-33e97b242c0a", "external-id": "nsx-vlan-transportzone-969", "segmentation_id": 969, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap989bf403-07", "ovs_interfaceid": "989bf403-079f-46b9-ab79-c645cec393aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 801.180017] env[63345]: DEBUG nova.compute.utils [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 801.182027] env[63345]: DEBUG nova.compute.manager [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 4f108dcc-c130-4c3f-840d-7a912150db3f] Allocating IP information in the background. {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 801.182145] env[63345]: DEBUG nova.network.neutron [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 4f108dcc-c130-4c3f-840d-7a912150db3f] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 801.185985] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 30755716-03a7-41bd-90c2-7ef21baf9975] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 801.219398] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6ec4ae4a-1862-4345-933b-1093499b3269 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Lock "e3d52cbd-e768-4425-b83e-180a6e58fd00" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.092s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 801.231606] env[63345]: DEBUG nova.policy [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6055500166344214a404427722503338', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dfc1248fb5ee4f798b6c59154d4cf623', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 801.572951] env[63345]: DEBUG oslo_concurrency.lockutils [req-7b5f4daa-7810-48a5-9782-939b6e6a9a2b req-96b809d4-1bc4-4b7f-b5b3-643094089270 service nova] Acquiring lock "85fb1ecd-4ca3-401d-a87a-131f0b275506" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 801.573174] env[63345]: DEBUG oslo_concurrency.lockutils [req-7b5f4daa-7810-48a5-9782-939b6e6a9a2b req-96b809d4-1bc4-4b7f-b5b3-643094089270 service nova] Acquired lock "85fb1ecd-4ca3-401d-a87a-131f0b275506" {{(pid=63345) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 801.574370] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd6b827e-319d-4fe1-9296-574fa002d20c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.577675] env[63345]: DEBUG nova.network.neutron [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 4f108dcc-c130-4c3f-840d-7a912150db3f] Successfully created port: f14abadb-834f-4695-87e7-c79a8d8b328e {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 801.596667] env[63345]: DEBUG oslo_concurrency.lockutils [req-7b5f4daa-7810-48a5-9782-939b6e6a9a2b req-96b809d4-1bc4-4b7f-b5b3-643094089270 service nova] Releasing lock "85fb1ecd-4ca3-401d-a87a-131f0b275506" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 801.596971] env[63345]: WARNING nova.compute.manager [req-7b5f4daa-7810-48a5-9782-939b6e6a9a2b req-96b809d4-1bc4-4b7f-b5b3-643094089270 service nova] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] Detach interface failed, port_id=27e88e33-527e-43d8-af2c-7bb4bdac51e2, reason: No device with interface-id 27e88e33-527e-43d8-af2c-7bb4bdac51e2 exists on VM: nova.exception.NotFound: No device with interface-id 27e88e33-527e-43d8-af2c-7bb4bdac51e2 exists on VM [ 801.637510] env[63345]: DEBUG oslo_concurrency.lockutils [None req-e32d5426-ff6e-4a60-a9e5-70929a9adbcd tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Acquiring lock "fe3e2b2a-9583-482e-b69b-6c130801d7db" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 801.637818] env[63345]: DEBUG oslo_concurrency.lockutils [None req-e32d5426-ff6e-4a60-a9e5-70929a9adbcd tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Lock "fe3e2b2a-9583-482e-b69b-6c130801d7db" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 801.638149] env[63345]: INFO nova.compute.manager [None req-e32d5426-ff6e-4a60-a9e5-70929a9adbcd tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: fe3e2b2a-9583-482e-b69b-6c130801d7db] Rebooting instance [ 801.687273] env[63345]: DEBUG nova.compute.manager [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 4f108dcc-c130-4c3f-840d-7a912150db3f] Start building block device mappings for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 801.694189] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: c07c7f5d-a674-458f-8253-1bc2d61be6c1] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 801.825161] env[63345]: DEBUG nova.network.neutron [req-2fcb550b-30a8-4fc0-9d9e-0d350c1c491d req-26a53197-d6b8-45ee-ae15-f09849e9da35 service nova] [instance: fe3e2b2a-9583-482e-b69b-6c130801d7db] Updated VIF entry in instance network info cache for port 5c61daf5-e16c-4171-8fbf-a8d0108d4a21. {{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 801.825680] env[63345]: DEBUG nova.network.neutron [req-2fcb550b-30a8-4fc0-9d9e-0d350c1c491d req-26a53197-d6b8-45ee-ae15-f09849e9da35 service nova] [instance: fe3e2b2a-9583-482e-b69b-6c130801d7db] Updating instance_info_cache with network_info: [{"id": "5c61daf5-e16c-4171-8fbf-a8d0108d4a21", "address": "fa:16:3e:5b:b7:f5", "network": {"id": "441f27c7-de99-494b-9db5-8e67e3c8e7b6", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-592603355-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8f0343855b6147f38b0cb3f2c72330e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5c61daf5-e1", "ovs_interfaceid": "5c61daf5-e16c-4171-8fbf-a8d0108d4a21", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 802.036371] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6293e3dc-2ff4-4de7-a0b7-8887400036b2 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquiring lock "refresh_cache-85fb1ecd-4ca3-401d-a87a-131f0b275506" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 802.036371] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6293e3dc-2ff4-4de7-a0b7-8887400036b2 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquired lock "refresh_cache-85fb1ecd-4ca3-401d-a87a-131f0b275506" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 802.036371] env[63345]: DEBUG nova.network.neutron [None req-6293e3dc-2ff4-4de7-a0b7-8887400036b2 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 802.159715] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-1e05419e-2000-4aa8-a607-257914ce88e2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.168524] env[63345]: DEBUG oslo_concurrency.lockutils [None req-e32d5426-ff6e-4a60-a9e5-70929a9adbcd tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Acquiring lock "refresh_cache-fe3e2b2a-9583-482e-b69b-6c130801d7db" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 802.169614] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49485eaa-6b3e-450c-b60b-794881efa2e2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.203281] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 0d5cb238-2d25-47b1-8ce6-15a20836dbfb] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 802.205123] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ad68452-122e-4384-a333-da62e8c4e0b8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.212904] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a19c6e73-4bcb-43a4-a3aa-71ce9075d1d3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.226363] env[63345]: DEBUG nova.compute.provider_tree [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 802.334839] env[63345]: DEBUG oslo_concurrency.lockutils [req-2fcb550b-30a8-4fc0-9d9e-0d350c1c491d req-26a53197-d6b8-45ee-ae15-f09849e9da35 service nova] Releasing lock "refresh_cache-fe3e2b2a-9583-482e-b69b-6c130801d7db" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 802.334839] env[63345]: DEBUG oslo_concurrency.lockutils [None req-e32d5426-ff6e-4a60-a9e5-70929a9adbcd tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Acquired lock "refresh_cache-fe3e2b2a-9583-482e-b69b-6c130801d7db" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 802.334839] env[63345]: DEBUG nova.network.neutron [None req-e32d5426-ff6e-4a60-a9e5-70929a9adbcd tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: fe3e2b2a-9583-482e-b69b-6c130801d7db] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 802.672561] env[63345]: DEBUG oslo_concurrency.lockutils [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquiring lock "f37b4a95-0725-4a84-b726-fd4f26e87020" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
802.672796] env[63345]: DEBUG oslo_concurrency.lockutils [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Lock "f37b4a95-0725-4a84-b726-fd4f26e87020" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 802.709856] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: e6bc8cb9-2f1a-49cb-974d-ea9a211126ee] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 802.716262] env[63345]: DEBUG nova.compute.manager [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 4f108dcc-c130-4c3f-840d-7a912150db3f] Start spawning the instance on the hypervisor. {{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 802.731690] env[63345]: DEBUG nova.scheduler.client.report [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 802.797645] env[63345]: DEBUG nova.network.neutron [None req-6293e3dc-2ff4-4de7-a0b7-8887400036b2 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] Updating instance_info_cache with network_info: [{"id": "989bf403-079f-46b9-ab79-c645cec393aa", "address": "fa:16:3e:93:20:cd", "network": {"id": "b360ab0d-3deb-4632-a8d5-c1639db9e9e2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2015660260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.217", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33c28bfca4da460e8ca96dc7519204c8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f35e69ef-c2c8-4b8c-9887-33e97b242c0a", "external-id": "nsx-vlan-transportzone-969", "segmentation_id": 969, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap989bf403-07", "ovs_interfaceid": "989bf403-079f-46b9-ab79-c645cec393aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 802.839684] env[63345]: DEBUG 
nova.virt.hardware [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='613dab18864eb25560eb0e79a89b4633',container_format='bare',created_at=2024-09-30T09:38:12Z,direct_url=,disk_format='vmdk',id=f0cb364f-cc7f-4213-88ce-b8773612e90e,min_disk=1,min_ram=0,name='tempest-test-snap-1394131402',owner='dfc1248fb5ee4f798b6c59154d4cf623',properties=ImageMetaProps,protected=,size=21334016,status='active',tags=,updated_at=2024-09-30T09:38:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 802.839936] env[63345]: DEBUG nova.virt.hardware [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 802.840108] env[63345]: DEBUG nova.virt.hardware [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 802.840298] env[63345]: DEBUG nova.virt.hardware [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 802.840446] env[63345]: DEBUG nova.virt.hardware [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 802.840595] env[63345]: DEBUG nova.virt.hardware [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 802.840830] env[63345]: DEBUG nova.virt.hardware [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 802.841022] env[63345]: DEBUG nova.virt.hardware [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 802.841203] env[63345]: DEBUG nova.virt.hardware [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 802.841367] env[63345]: DEBUG 
nova.virt.hardware [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 802.841539] env[63345]: DEBUG nova.virt.hardware [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 802.844291] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2583b95-080b-4406-a2f5-a8c970ad37ed {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.853293] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1de6c94-b9a6-4c5e-b9a7-f17aa04eb4f5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.083475] env[63345]: DEBUG nova.network.neutron [None req-e32d5426-ff6e-4a60-a9e5-70929a9adbcd tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: fe3e2b2a-9583-482e-b69b-6c130801d7db] Updating instance_info_cache with network_info: [{"id": "5c61daf5-e16c-4171-8fbf-a8d0108d4a21", "address": "fa:16:3e:5b:b7:f5", "network": {"id": "441f27c7-de99-494b-9db5-8e67e3c8e7b6", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-592603355-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8f0343855b6147f38b0cb3f2c72330e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5c61daf5-e1", "ovs_interfaceid": "5c61daf5-e16c-4171-8fbf-a8d0108d4a21", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 803.146580] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8479a8c2-85cb-4f55-89bb-1d6962627a93 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquiring lock "85fb1ecd-4ca3-401d-a87a-131f0b275506" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 803.146855] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8479a8c2-85cb-4f55-89bb-1d6962627a93 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Lock "85fb1ecd-4ca3-401d-a87a-131f0b275506" acquired by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 803.147087] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8479a8c2-85cb-4f55-89bb-1d6962627a93 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquiring lock "85fb1ecd-4ca3-401d-a87a-131f0b275506-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 803.147286] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8479a8c2-85cb-4f55-89bb-1d6962627a93 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Lock "85fb1ecd-4ca3-401d-a87a-131f0b275506-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 803.147525] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8479a8c2-85cb-4f55-89bb-1d6962627a93 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Lock "85fb1ecd-4ca3-401d-a87a-131f0b275506-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 803.149711] env[63345]: INFO nova.compute.manager [None req-8479a8c2-85cb-4f55-89bb-1d6962627a93 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] Terminating instance [ 803.162241] env[63345]: DEBUG nova.compute.manager [req-46208fe3-1c07-4911-80b9-9d77a2fd33c1 req-350326a3-1676-4119-9c1b-10a057d0119d service nova] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Received event network-changed-c6c991f3-51b9-4502-af97-3ca846db3c73 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 803.162560] env[63345]: DEBUG nova.compute.manager [req-46208fe3-1c07-4911-80b9-9d77a2fd33c1 req-350326a3-1676-4119-9c1b-10a057d0119d service nova] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Refreshing instance network info cache due to event network-changed-c6c991f3-51b9-4502-af97-3ca846db3c73. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 803.162641] env[63345]: DEBUG oslo_concurrency.lockutils [req-46208fe3-1c07-4911-80b9-9d77a2fd33c1 req-350326a3-1676-4119-9c1b-10a057d0119d service nova] Acquiring lock "refresh_cache-00c58889-75f7-4a4b-a5a3-a45723c1f495" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 803.164236] env[63345]: DEBUG oslo_concurrency.lockutils [req-46208fe3-1c07-4911-80b9-9d77a2fd33c1 req-350326a3-1676-4119-9c1b-10a057d0119d service nova] Acquired lock "refresh_cache-00c58889-75f7-4a4b-a5a3-a45723c1f495" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 803.164236] env[63345]: DEBUG nova.network.neutron [req-46208fe3-1c07-4911-80b9-9d77a2fd33c1 req-350326a3-1676-4119-9c1b-10a057d0119d service nova] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Refreshing network info cache for port c6c991f3-51b9-4502-af97-3ca846db3c73 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 803.179294] env[63345]: DEBUG nova.compute.manager [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: f37b4a95-0725-4a84-b726-fd4f26e87020] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 803.218669] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 4d41f4a7-4fde-4d34-be7c-533c00fe5ae6] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 803.234973] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.562s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 803.235523] env[63345]: DEBUG nova.compute.manager [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 11652422-9136-4453-b932-06695f9bc910] Start building networks asynchronously for instance. 
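The recurring "Acquiring lock … / acquired … waited N s / released … held N s" triplets come from the oslo.concurrency lock wrapper used around steps such as the refresh_cache and compute_resources sections above. Below is a standard-library stand-in that reproduces the same waited/held accounting, purely to illustrate the pattern; it is not oslo's implementation, and the lock name in the usage line is just an example taken from this log.

    # Minimal stand-in for the "Acquiring/acquired/released" accounting seen in
    # these logs. Uses only the standard library; oslo.concurrency's lockutils
    # does considerably more (fair semaphores, external file locks, decorators).
    import threading
    import time
    from contextlib import contextmanager

    _locks = {}
    _registry_guard = threading.Lock()

    @contextmanager
    def timed_lock(name, owner):
        with _registry_guard:
            lock = _locks.setdefault(name, threading.Lock())
        print(f'Acquiring lock "{name}" by "{owner}"')
        t0 = time.monotonic()
        lock.acquire()
        waited = time.monotonic() - t0
        print(f'Lock "{name}" acquired by "{owner}" :: waited {waited:.3f}s')
        t1 = time.monotonic()
        try:
            yield
        finally:
            lock.release()
            held = time.monotonic() - t1
            print(f'Lock "{name}" "released" by "{owner}" :: held {held:.3f}s')

    # Usage mirroring the per-instance cache lock pattern above (names illustrative):
    with timed_lock("refresh_cache-85fb1ecd-4ca3-401d-a87a-131f0b275506", "example"):
        time.sleep(0.01)  # stand-in for rebuilding the network info cache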
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 803.239019] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7d7b1dc2-9273-4a08-b3a4-774abefa7e1c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.114s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 803.239292] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7d7b1dc2-9273-4a08-b3a4-774abefa7e1c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 803.242667] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f59e42eb-b249-4086-aed8-19c9a3367f3e tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.038s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 803.242899] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f59e42eb-b249-4086-aed8-19c9a3367f3e tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 803.247016] env[63345]: DEBUG oslo_concurrency.lockutils [None req-2386134b-d3d3-410a-9d58-55819fc91c6b tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.624s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 803.247016] env[63345]: DEBUG oslo_concurrency.lockutils [None req-2386134b-d3d3-410a-9d58-55819fc91c6b tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 803.248274] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.133s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 803.249998] env[63345]: INFO nova.compute.claims [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 803.280721] env[63345]: INFO nova.scheduler.client.report [None req-7d7b1dc2-9273-4a08-b3a4-774abefa7e1c 
tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Deleted allocations for instance 778faa4f-4c5f-4ec2-b17b-5d7513c9c218 [ 803.289018] env[63345]: INFO nova.scheduler.client.report [None req-f59e42eb-b249-4086-aed8-19c9a3367f3e tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Deleted allocations for instance 93112cc1-f9a1-4188-9555-bddf483426a1 [ 803.300826] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6293e3dc-2ff4-4de7-a0b7-8887400036b2 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Releasing lock "refresh_cache-85fb1ecd-4ca3-401d-a87a-131f0b275506" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 803.309772] env[63345]: INFO nova.scheduler.client.report [None req-2386134b-d3d3-410a-9d58-55819fc91c6b tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Deleted allocations for instance 7bef089c-e93b-4ba6-a683-4e076489f92a [ 803.313678] env[63345]: WARNING oslo_messaging._drivers.amqpdriver [None req-2386134b-d3d3-410a-9d58-55819fc91c6b tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Number of call queues is 11, greater than warning threshold: 10. There could be a leak. Increasing threshold to: 20 [ 803.326869] env[63345]: DEBUG nova.network.neutron [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 4f108dcc-c130-4c3f-840d-7a912150db3f] Successfully updated port: f14abadb-834f-4695-87e7-c79a8d8b328e {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 803.441545] env[63345]: DEBUG nova.compute.manager [req-5df17059-3aa1-4e1b-ab21-fb29457c9689 req-48c59f57-2a1f-4d6b-996e-a8336e92cb8d service nova] [instance: 4f108dcc-c130-4c3f-840d-7a912150db3f] Received event network-vif-plugged-f14abadb-834f-4695-87e7-c79a8d8b328e {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 803.444319] env[63345]: DEBUG oslo_concurrency.lockutils [req-5df17059-3aa1-4e1b-ab21-fb29457c9689 req-48c59f57-2a1f-4d6b-996e-a8336e92cb8d service nova] Acquiring lock "4f108dcc-c130-4c3f-840d-7a912150db3f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 803.445044] env[63345]: DEBUG oslo_concurrency.lockutils [req-5df17059-3aa1-4e1b-ab21-fb29457c9689 req-48c59f57-2a1f-4d6b-996e-a8336e92cb8d service nova] Lock "4f108dcc-c130-4c3f-840d-7a912150db3f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 803.445313] env[63345]: DEBUG oslo_concurrency.lockutils [req-5df17059-3aa1-4e1b-ab21-fb29457c9689 req-48c59f57-2a1f-4d6b-996e-a8336e92cb8d service nova] Lock "4f108dcc-c130-4c3f-840d-7a912150db3f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 803.445519] env[63345]: DEBUG nova.compute.manager [req-5df17059-3aa1-4e1b-ab21-fb29457c9689 req-48c59f57-2a1f-4d6b-996e-a8336e92cb8d service nova] [instance: 
4f108dcc-c130-4c3f-840d-7a912150db3f] No waiting events found dispatching network-vif-plugged-f14abadb-834f-4695-87e7-c79a8d8b328e {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 803.445702] env[63345]: WARNING nova.compute.manager [req-5df17059-3aa1-4e1b-ab21-fb29457c9689 req-48c59f57-2a1f-4d6b-996e-a8336e92cb8d service nova] [instance: 4f108dcc-c130-4c3f-840d-7a912150db3f] Received unexpected event network-vif-plugged-f14abadb-834f-4695-87e7-c79a8d8b328e for instance with vm_state building and task_state spawning. [ 803.589730] env[63345]: DEBUG oslo_concurrency.lockutils [None req-e32d5426-ff6e-4a60-a9e5-70929a9adbcd tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Releasing lock "refresh_cache-fe3e2b2a-9583-482e-b69b-6c130801d7db" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 803.592872] env[63345]: DEBUG nova.compute.manager [None req-e32d5426-ff6e-4a60-a9e5-70929a9adbcd tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: fe3e2b2a-9583-482e-b69b-6c130801d7db] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 803.594232] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ade8b8cb-4b4b-4972-a4d4-4e01f510cd7b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.653255] env[63345]: DEBUG nova.compute.manager [None req-8479a8c2-85cb-4f55-89bb-1d6962627a93 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] Start destroying the instance on the hypervisor. {{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 803.653501] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-8479a8c2-85cb-4f55-89bb-1d6962627a93 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 803.654364] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ce7e078-a086-4611-85af-34b40bff6b4c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.662669] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-8479a8c2-85cb-4f55-89bb-1d6962627a93 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 803.662916] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f9278c9f-54a0-49d5-9000-3300c9d1fda4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.669537] env[63345]: DEBUG oslo_vmware.api [None req-8479a8c2-85cb-4f55-89bb-1d6962627a93 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Waiting for the task: (returnval){ [ 803.669537] env[63345]: value = "task-1017109" [ 803.669537] env[63345]: _type = "Task" [ 803.669537] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.677328] env[63345]: DEBUG oslo_vmware.api [None req-8479a8c2-85cb-4f55-89bb-1d6962627a93 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017109, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.702061] env[63345]: DEBUG oslo_concurrency.lockutils [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 803.723180] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 27c6dc17-4ded-4fe7-8fba-265eae64fc32] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 803.756555] env[63345]: DEBUG nova.compute.utils [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 803.765022] env[63345]: DEBUG nova.compute.manager [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 11652422-9136-4453-b932-06695f9bc910] Allocating IP information in the background. {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 803.765022] env[63345]: DEBUG nova.network.neutron [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 11652422-9136-4453-b932-06695f9bc910] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 803.793022] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7d7b1dc2-9273-4a08-b3a4-774abefa7e1c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Lock "778faa4f-4c5f-4ec2-b17b-5d7513c9c218" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.367s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 803.794812] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f59e42eb-b249-4086-aed8-19c9a3367f3e tempest-ServersTestMultiNic-1173239763 tempest-ServersTestMultiNic-1173239763-project-member] Lock "93112cc1-f9a1-4188-9555-bddf483426a1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.680s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 803.805030] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6293e3dc-2ff4-4de7-a0b7-8887400036b2 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Lock "interface-85fb1ecd-4ca3-401d-a87a-131f0b275506-27e88e33-527e-43d8-af2c-7bb4bdac51e2" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.672s {{(pid=63345) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 803.820742] env[63345]: DEBUG oslo_concurrency.lockutils [None req-2386134b-d3d3-410a-9d58-55819fc91c6b tempest-ServersTestBootFromVolume-1492740413 tempest-ServersTestBootFromVolume-1492740413-project-member] Lock "7bef089c-e93b-4ba6-a683-4e076489f92a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.333s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 803.828852] env[63345]: DEBUG nova.policy [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '27fc4f99c7f44b1ea421bd8f13de6e43', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '964cee117b3c4601b3afe82a8bb9c23e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 803.830830] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Acquiring lock "refresh_cache-4f108dcc-c130-4c3f-840d-7a912150db3f" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 803.831027] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Acquired lock "refresh_cache-4f108dcc-c130-4c3f-840d-7a912150db3f" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 803.831699] env[63345]: DEBUG nova.network.neutron [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 4f108dcc-c130-4c3f-840d-7a912150db3f] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 804.029304] env[63345]: DEBUG nova.network.neutron [req-46208fe3-1c07-4911-80b9-9d77a2fd33c1 req-350326a3-1676-4119-9c1b-10a057d0119d service nova] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Updated VIF entry in instance network info cache for port c6c991f3-51b9-4502-af97-3ca846db3c73. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 804.029707] env[63345]: DEBUG nova.network.neutron [req-46208fe3-1c07-4911-80b9-9d77a2fd33c1 req-350326a3-1676-4119-9c1b-10a057d0119d service nova] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Updating instance_info_cache with network_info: [{"id": "c6c991f3-51b9-4502-af97-3ca846db3c73", "address": "fa:16:3e:87:40:da", "network": {"id": "e38fba0e-9c96-4a09-b0f0-08546e52eba6", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-944344960-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "307c1bea8f6d47ddb4d5ebac8bba25ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "721e64ee-fc02-4eb5-9c8c-ea55647a1b92", "external-id": "nsx-vlan-transportzone-621", "segmentation_id": 621, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc6c991f3-51", "ovs_interfaceid": "c6c991f3-51b9-4502-af97-3ca846db3c73", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 804.180373] env[63345]: DEBUG oslo_vmware.api [None req-8479a8c2-85cb-4f55-89bb-1d6962627a93 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017109, 'name': PowerOffVM_Task, 'duration_secs': 0.236837} completed successfully. 
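PowerOffVM_Task above follows the usual vSphere task lifecycle: the API call returns a task handle, the caller polls it ("progress is 0%") and eventually sees it completed with a duration. A generic poll-until-done loop in that spirit is sketched below; the FakeTask class and the wait_for_task signature are stand-ins for illustration, not oslo.vmware's API, which drives the same idea through a looping call over the vSphere TaskInfo object.

    # Generic poll-until-done loop in the spirit of the task handling above.
    import time

    def wait_for_task(task, poll_interval=0.5, timeout=300.0):
        """Poll a task-like object until it reports success, error or timeout."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            print(f"Task: {task.id} progress is {task.progress}%.")
            if task.state == "success":
                return task.result
            if task.state == "error":
                raise RuntimeError(f"Task {task.id} failed: {task.error}")
            time.sleep(poll_interval)
        raise TimeoutError(f"Task {task.id} did not complete within {timeout}s")

    class FakeTask:
        """Toy task whose state flips to success after ~0.05s (demo only)."""
        def __init__(self):
            self.id = "task-example"
            self.result, self.error = "ok", None
            self._t0 = time.monotonic()

        @property
        def state(self):
            return "success" if time.monotonic() - self._t0 > 0.05 else "running"

        @property
        def progress(self):
            return 100 if self.state == "success" else 0

    print(wait_for_task(FakeTask(), poll_interval=0.02))  # -> ok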
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.180678] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-8479a8c2-85cb-4f55-89bb-1d6962627a93 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 804.180909] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-8479a8c2-85cb-4f55-89bb-1d6962627a93 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 804.181225] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b3b27a76-6928-474e-93cd-339a6c3f4265 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.226607] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 46d3332a-bfb9-4812-8201-a87467ce5151] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 804.252054] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-8479a8c2-85cb-4f55-89bb-1d6962627a93 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 804.253082] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-8479a8c2-85cb-4f55-89bb-1d6962627a93 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 804.253082] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-8479a8c2-85cb-4f55-89bb-1d6962627a93 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Deleting the datastore file [datastore2] 85fb1ecd-4ca3-401d-a87a-131f0b275506 {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 804.253082] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0ecbffe3-411a-442e-9ae2-630f0781c52a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.260609] env[63345]: DEBUG oslo_vmware.api [None req-8479a8c2-85cb-4f55-89bb-1d6962627a93 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Waiting for the task: (returnval){ [ 804.260609] env[63345]: value = "task-1017111" [ 804.260609] env[63345]: _type = "Task" [ 804.260609] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.269838] env[63345]: DEBUG nova.compute.manager [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 11652422-9136-4453-b932-06695f9bc910] Start building block device mappings for instance. 
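Building block device mappings is also where the earlier "Using /dev/sd instead of None" line originates: when the request supplies no device name, Nova falls back to the /dev/sd prefix and picks the next free letter. A simplified, illustrative version of that choice is sketched below; the real get_next_device_name also handles multi-letter suffixes, driver-specific prefixes and mappings still being queued for the build.

    # Simplified illustration of choosing the next free block device name when
    # the caller passed no device (the "Using /dev/sd instead of None" case).
    import string

    def next_device_name(used, prefix="/dev/sd"):
        """Return the first unused name such as /dev/sdb, given names in use."""
        used_letters = {name[len(prefix):] for name in used if name.startswith(prefix)}
        for letter in string.ascii_lowercase:
            if letter not in used_letters:
                return prefix + letter
        raise ValueError("no free device names left under " + prefix)

    print(next_device_name(["/dev/sda"]))  # -> /dev/sdb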
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 804.276628] env[63345]: DEBUG oslo_vmware.api [None req-8479a8c2-85cb-4f55-89bb-1d6962627a93 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017111, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.318241] env[63345]: DEBUG nova.network.neutron [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 11652422-9136-4453-b932-06695f9bc910] Successfully created port: 71b7616d-5472-4d3c-a8ca-6984d7c70c12 {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 804.383244] env[63345]: DEBUG nova.network.neutron [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 4f108dcc-c130-4c3f-840d-7a912150db3f] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 804.534224] env[63345]: DEBUG oslo_concurrency.lockutils [req-46208fe3-1c07-4911-80b9-9d77a2fd33c1 req-350326a3-1676-4119-9c1b-10a057d0119d service nova] Releasing lock "refresh_cache-00c58889-75f7-4a4b-a5a3-a45723c1f495" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 804.534515] env[63345]: DEBUG nova.compute.manager [req-46208fe3-1c07-4911-80b9-9d77a2fd33c1 req-350326a3-1676-4119-9c1b-10a057d0119d service nova] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Received event network-changed-c6c991f3-51b9-4502-af97-3ca846db3c73 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 804.534698] env[63345]: DEBUG nova.compute.manager [req-46208fe3-1c07-4911-80b9-9d77a2fd33c1 req-350326a3-1676-4119-9c1b-10a057d0119d service nova] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Refreshing instance network info cache due to event network-changed-c6c991f3-51b9-4502-af97-3ca846db3c73. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 804.535022] env[63345]: DEBUG oslo_concurrency.lockutils [req-46208fe3-1c07-4911-80b9-9d77a2fd33c1 req-350326a3-1676-4119-9c1b-10a057d0119d service nova] Acquiring lock "refresh_cache-00c58889-75f7-4a4b-a5a3-a45723c1f495" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 804.535076] env[63345]: DEBUG oslo_concurrency.lockutils [req-46208fe3-1c07-4911-80b9-9d77a2fd33c1 req-350326a3-1676-4119-9c1b-10a057d0119d service nova] Acquired lock "refresh_cache-00c58889-75f7-4a4b-a5a3-a45723c1f495" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 804.535259] env[63345]: DEBUG nova.network.neutron [req-46208fe3-1c07-4911-80b9-9d77a2fd33c1 req-350326a3-1676-4119-9c1b-10a057d0119d service nova] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Refreshing network info cache for port c6c991f3-51b9-4502-af97-3ca846db3c73 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 804.615976] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7274e8b3-f1a1-45bb-8b1b-e068481cef41 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.623896] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-e32d5426-ff6e-4a60-a9e5-70929a9adbcd tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: fe3e2b2a-9583-482e-b69b-6c130801d7db] Doing hard reboot of VM {{(pid=63345) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1064}} [ 804.624206] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-06d519be-99bf-47de-a7d6-cdcf200ee8fb {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.630554] env[63345]: DEBUG oslo_vmware.api [None req-e32d5426-ff6e-4a60-a9e5-70929a9adbcd tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Waiting for the task: (returnval){ [ 804.630554] env[63345]: value = "task-1017112" [ 804.630554] env[63345]: _type = "Task" [ 804.630554] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.646018] env[63345]: DEBUG oslo_vmware.api [None req-e32d5426-ff6e-4a60-a9e5-70929a9adbcd tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Task: {'id': task-1017112, 'name': ResetVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.730452] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 04fd7aaa-658d-480d-8465-825f120477bc] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 804.751108] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f434b0a4-f872-423b-84f2-ed06bee7ad34 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.758527] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f98799b-3a00-42fa-9cf5-7b48e1104883 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.813072] env[63345]: DEBUG oslo_vmware.api [None req-8479a8c2-85cb-4f55-89bb-1d6962627a93 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017111, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.413589} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.818229] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1330c61e-8ad6-4d2a-9164-6cf635998540 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.819125] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-8479a8c2-85cb-4f55-89bb-1d6962627a93 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 804.819368] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-8479a8c2-85cb-4f55-89bb-1d6962627a93 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 804.819570] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-8479a8c2-85cb-4f55-89bb-1d6962627a93 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 804.819772] env[63345]: INFO nova.compute.manager [None req-8479a8c2-85cb-4f55-89bb-1d6962627a93 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] Took 1.17 seconds to destroy the instance on the hypervisor. [ 804.820015] env[63345]: DEBUG oslo.service.loopingcall [None req-8479a8c2-85cb-4f55-89bb-1d6962627a93 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
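The looping call above waits for _deallocate_network_with_retries to return, i.e. network deallocation is retried rather than attempted once. The sketch below shows that bounded retry-with-back-off shape in plain Python; oslo.service's looping-call helpers are what actually drive it in Nova, and `deallocate` here is a hypothetical callable standing in for the Neutron deallocation.

    # Bounded retry with back-off, in the spirit of
    # _deallocate_network_with_retries logged above.
    import time

    def deallocate_with_retries(deallocate, attempts=3, base_delay=1.0):
        for attempt in range(1, attempts + 1):
            try:
                return deallocate()
            except Exception as exc:  # the real code narrows this to API errors
                if attempt == attempts:
                    raise
                delay = base_delay * 2 ** (attempt - 1)
                print(f"Deallocation failed ({exc}); retrying in {delay:.0f}s "
                      f"(attempt {attempt}/{attempts})")
                time.sleep(delay)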
{{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 804.821351] env[63345]: DEBUG nova.compute.manager [-] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 804.821351] env[63345]: DEBUG nova.network.neutron [-] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 804.829257] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ebb3302-ebc4-4500-a502-baf3ea901b28 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.852620] env[63345]: DEBUG nova.compute.provider_tree [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 804.924983] env[63345]: DEBUG nova.network.neutron [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 4f108dcc-c130-4c3f-840d-7a912150db3f] Updating instance_info_cache with network_info: [{"id": "f14abadb-834f-4695-87e7-c79a8d8b328e", "address": "fa:16:3e:b4:8c:2c", "network": {"id": "6adcb593-15d5-4959-9e09-f7794e033f9e", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1117018512-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dfc1248fb5ee4f798b6c59154d4cf623", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "450939f7-f74b-41f7-93f7-b4fde6a6fbed", "external-id": "nsx-vlan-transportzone-866", "segmentation_id": 866, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf14abadb-83", "ovs_interfaceid": "f14abadb-834f-4695-87e7-c79a8d8b328e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 805.145934] env[63345]: DEBUG oslo_vmware.api [None req-e32d5426-ff6e-4a60-a9e5-70929a9adbcd tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Task: {'id': task-1017112, 'name': ResetVM_Task, 'duration_secs': 0.125395} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.146562] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-e32d5426-ff6e-4a60-a9e5-70929a9adbcd tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: fe3e2b2a-9583-482e-b69b-6c130801d7db] Did hard reboot of VM {{(pid=63345) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1068}} [ 805.146859] env[63345]: DEBUG nova.compute.manager [None req-e32d5426-ff6e-4a60-a9e5-70929a9adbcd tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: fe3e2b2a-9583-482e-b69b-6c130801d7db] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 805.147777] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5c163f7-a7c6-44a3-93e9-c0740d66e30b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.235925] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: b4a7d6dd-98dc-49d8-b344-1878cd5a3f51] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 805.260444] env[63345]: DEBUG nova.network.neutron [req-46208fe3-1c07-4911-80b9-9d77a2fd33c1 req-350326a3-1676-4119-9c1b-10a057d0119d service nova] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Updated VIF entry in instance network info cache for port c6c991f3-51b9-4502-af97-3ca846db3c73. {{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 805.260958] env[63345]: DEBUG nova.network.neutron [req-46208fe3-1c07-4911-80b9-9d77a2fd33c1 req-350326a3-1676-4119-9c1b-10a057d0119d service nova] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Updating instance_info_cache with network_info: [{"id": "c6c991f3-51b9-4502-af97-3ca846db3c73", "address": "fa:16:3e:87:40:da", "network": {"id": "e38fba0e-9c96-4a09-b0f0-08546e52eba6", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-944344960-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.246", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "307c1bea8f6d47ddb4d5ebac8bba25ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "721e64ee-fc02-4eb5-9c8c-ea55647a1b92", "external-id": "nsx-vlan-transportzone-621", "segmentation_id": 621, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc6c991f3-51", "ovs_interfaceid": "c6c991f3-51b9-4502-af97-3ca846db3c73", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 805.322701] env[63345]: DEBUG nova.compute.manager [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 11652422-9136-4453-b932-06695f9bc910] 
Start spawning the instance on the hypervisor. {{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 805.348818] env[63345]: DEBUG nova.virt.hardware [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 805.349094] env[63345]: DEBUG nova.virt.hardware [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 805.349270] env[63345]: DEBUG nova.virt.hardware [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 805.349462] env[63345]: DEBUG nova.virt.hardware [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 805.349615] env[63345]: DEBUG nova.virt.hardware [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 805.349764] env[63345]: DEBUG nova.virt.hardware [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 805.349969] env[63345]: DEBUG nova.virt.hardware [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 805.350159] env[63345]: DEBUG nova.virt.hardware [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 805.350333] env[63345]: DEBUG nova.virt.hardware [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 805.350501] env[63345]: DEBUG nova.virt.hardware [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 805.350676] env[63345]: DEBUG nova.virt.hardware [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 805.351568] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a20e533-a07a-4c45-a4ef-3ed1be76ecf9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.360698] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86846c70-7158-4f46-8805-19b0477b1dcb {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.381589] env[63345]: ERROR nova.scheduler.client.report [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [req-6ac15bfb-a11b-4d67-94d1-38759542323d] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID fc35ddde-c15e-4ab8-bf77-a06ae0805b57. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-6ac15bfb-a11b-4d67-94d1-38759542323d"}]} [ 805.410572] env[63345]: DEBUG nova.scheduler.client.report [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Refreshing inventories for resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 805.426867] env[63345]: DEBUG nova.scheduler.client.report [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Updating ProviderTree inventory for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 805.427209] env[63345]: DEBUG nova.compute.provider_tree [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 805.429545] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Releasing lock "refresh_cache-4f108dcc-c130-4c3f-840d-7a912150db3f" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 805.429838] env[63345]: DEBUG nova.compute.manager [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 4f108dcc-c130-4c3f-840d-7a912150db3f] Instance network_info: |[{"id": "f14abadb-834f-4695-87e7-c79a8d8b328e", "address": "fa:16:3e:b4:8c:2c", "network": {"id": "6adcb593-15d5-4959-9e09-f7794e033f9e", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1117018512-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dfc1248fb5ee4f798b6c59154d4cf623", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", 
"port_filter": true, "nsx-logical-switch-id": "450939f7-f74b-41f7-93f7-b4fde6a6fbed", "external-id": "nsx-vlan-transportzone-866", "segmentation_id": 866, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf14abadb-83", "ovs_interfaceid": "f14abadb-834f-4695-87e7-c79a8d8b328e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 805.430438] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 4f108dcc-c130-4c3f-840d-7a912150db3f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b4:8c:2c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '450939f7-f74b-41f7-93f7-b4fde6a6fbed', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f14abadb-834f-4695-87e7-c79a8d8b328e', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 805.437890] env[63345]: DEBUG oslo.service.loopingcall [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 805.438127] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4f108dcc-c130-4c3f-840d-7a912150db3f] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 805.438355] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a040d0da-e23d-436a-9f16-465f4595a398 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.453637] env[63345]: DEBUG nova.scheduler.client.report [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Refreshing aggregate associations for resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57, aggregates: None {{(pid=63345) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 805.462383] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 805.462383] env[63345]: value = "task-1017113" [ 805.462383] env[63345]: _type = "Task" [ 805.462383] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.470490] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017113, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.486471] env[63345]: DEBUG nova.scheduler.client.report [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Refreshing trait associations for resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=63345) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 805.660078] env[63345]: DEBUG oslo_concurrency.lockutils [None req-e32d5426-ff6e-4a60-a9e5-70929a9adbcd tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Lock "fe3e2b2a-9583-482e-b69b-6c130801d7db" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.022s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 805.739896] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 28caa5f5-141a-4ef9-abb3-33a1973d99cf] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 805.767500] env[63345]: DEBUG oslo_concurrency.lockutils [req-46208fe3-1c07-4911-80b9-9d77a2fd33c1 req-350326a3-1676-4119-9c1b-10a057d0119d service nova] Releasing lock "refresh_cache-00c58889-75f7-4a4b-a5a3-a45723c1f495" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 805.899856] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5e5d934-cf72-45bc-9fbf-54c7b4e2c664 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.908632] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79b9937f-a99c-4162-907f-2d2f3e784835 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.950543] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d325a73-4b0f-4d82-84ec-6f00e5bf6a08 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.958666] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-483113d0-2264-46bf-80bf-1fc3ad66625e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.975479] env[63345]: DEBUG nova.compute.provider_tree [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 805.979583] 
env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017113, 'name': CreateVM_Task, 'duration_secs': 0.400241} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.979747] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4f108dcc-c130-4c3f-840d-7a912150db3f] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 805.980432] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f0cb364f-cc7f-4213-88ce-b8773612e90e" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 805.980606] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f0cb364f-cc7f-4213-88ce-b8773612e90e" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 805.981076] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f0cb364f-cc7f-4213-88ce-b8773612e90e" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 805.981524] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-26e07c52-9859-43dc-99cc-5e72e2534ea1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.986372] env[63345]: DEBUG oslo_vmware.api [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Waiting for the task: (returnval){ [ 805.986372] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52c0cd4f-6a30-7676-4d76-0b6e164d4e4b" [ 805.986372] env[63345]: _type = "Task" [ 805.986372] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.993465] env[63345]: DEBUG oslo_vmware.api [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52c0cd4f-6a30-7676-4d76-0b6e164d4e4b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.243233] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: ee31689b-bf0b-4737-86c7-5451c763e603] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 806.312893] env[63345]: DEBUG nova.compute.manager [req-ca6ea5ae-739b-4d7e-ac59-e15a8a15cb33 req-27376b58-bef9-4b9a-a8e1-c4eed21c2f46 service nova] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Received event network-changed-c6c991f3-51b9-4502-af97-3ca846db3c73 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 806.312893] env[63345]: DEBUG nova.compute.manager [req-ca6ea5ae-739b-4d7e-ac59-e15a8a15cb33 req-27376b58-bef9-4b9a-a8e1-c4eed21c2f46 service nova] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Refreshing instance network info cache due to event network-changed-c6c991f3-51b9-4502-af97-3ca846db3c73. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 806.312893] env[63345]: DEBUG oslo_concurrency.lockutils [req-ca6ea5ae-739b-4d7e-ac59-e15a8a15cb33 req-27376b58-bef9-4b9a-a8e1-c4eed21c2f46 service nova] Acquiring lock "refresh_cache-00c58889-75f7-4a4b-a5a3-a45723c1f495" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 806.312893] env[63345]: DEBUG oslo_concurrency.lockutils [req-ca6ea5ae-739b-4d7e-ac59-e15a8a15cb33 req-27376b58-bef9-4b9a-a8e1-c4eed21c2f46 service nova] Acquired lock "refresh_cache-00c58889-75f7-4a4b-a5a3-a45723c1f495" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 806.313121] env[63345]: DEBUG nova.network.neutron [req-ca6ea5ae-739b-4d7e-ac59-e15a8a15cb33 req-27376b58-bef9-4b9a-a8e1-c4eed21c2f46 service nova] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Refreshing network info cache for port c6c991f3-51b9-4502-af97-3ca846db3c73 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 806.460722] env[63345]: DEBUG nova.compute.manager [req-14cd0bfb-b903-4830-bd93-e535eadb2972 req-0bd1ad93-ed5d-489a-821a-c6267a46daf5 service nova] [instance: 4f108dcc-c130-4c3f-840d-7a912150db3f] Received event network-changed-f14abadb-834f-4695-87e7-c79a8d8b328e {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 806.460968] env[63345]: DEBUG nova.compute.manager [req-14cd0bfb-b903-4830-bd93-e535eadb2972 req-0bd1ad93-ed5d-489a-821a-c6267a46daf5 service nova] [instance: 4f108dcc-c130-4c3f-840d-7a912150db3f] Refreshing instance network info cache due to event network-changed-f14abadb-834f-4695-87e7-c79a8d8b328e. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 806.461528] env[63345]: DEBUG oslo_concurrency.lockutils [req-14cd0bfb-b903-4830-bd93-e535eadb2972 req-0bd1ad93-ed5d-489a-821a-c6267a46daf5 service nova] Acquiring lock "refresh_cache-4f108dcc-c130-4c3f-840d-7a912150db3f" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 806.461725] env[63345]: DEBUG oslo_concurrency.lockutils [req-14cd0bfb-b903-4830-bd93-e535eadb2972 req-0bd1ad93-ed5d-489a-821a-c6267a46daf5 service nova] Acquired lock "refresh_cache-4f108dcc-c130-4c3f-840d-7a912150db3f" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 806.461941] env[63345]: DEBUG nova.network.neutron [req-14cd0bfb-b903-4830-bd93-e535eadb2972 req-0bd1ad93-ed5d-489a-821a-c6267a46daf5 service nova] [instance: 4f108dcc-c130-4c3f-840d-7a912150db3f] Refreshing network info cache for port f14abadb-834f-4695-87e7-c79a8d8b328e {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 806.493379] env[63345]: DEBUG nova.network.neutron [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 11652422-9136-4453-b932-06695f9bc910] Successfully updated port: 71b7616d-5472-4d3c-a8ca-6984d7c70c12 {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 806.512832] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f0cb364f-cc7f-4213-88ce-b8773612e90e" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 806.513606] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 4f108dcc-c130-4c3f-840d-7a912150db3f] Processing image f0cb364f-cc7f-4213-88ce-b8773612e90e {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 806.513606] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f0cb364f-cc7f-4213-88ce-b8773612e90e/f0cb364f-cc7f-4213-88ce-b8773612e90e.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 806.513606] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f0cb364f-cc7f-4213-88ce-b8773612e90e/f0cb364f-cc7f-4213-88ce-b8773612e90e.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 806.513847] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 806.514971] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-7b22e87e-49df-41e0-b703-599044b4fb15 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.527096] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 806.527376] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 806.528994] env[63345]: DEBUG nova.scheduler.client.report [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Updated inventory for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with generation 92 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 806.529387] env[63345]: DEBUG nova.compute.provider_tree [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Updating resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 generation from 92 to 93 during operation: update_inventory {{(pid=63345) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 806.529526] env[63345]: DEBUG nova.compute.provider_tree [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 806.536190] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-755e05df-d691-4ef5-b2c6-65826aa4309c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.545664] env[63345]: DEBUG oslo_vmware.api [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Waiting for the task: (returnval){ [ 806.545664] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52b3d5f7-6966-e073-8115-312b8b9121a5" [ 806.545664] env[63345]: _type = "Task" [ 806.545664] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.553268] env[63345]: DEBUG oslo_vmware.api [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52b3d5f7-6966-e073-8115-312b8b9121a5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.722021] env[63345]: DEBUG nova.network.neutron [-] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 806.747934] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 1f595aef-799f-4ca4-be91-e95ef056926c] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 807.004281] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquiring lock "refresh_cache-11652422-9136-4453-b932-06695f9bc910" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 807.004991] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquired lock "refresh_cache-11652422-9136-4453-b932-06695f9bc910" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 807.005825] env[63345]: DEBUG nova.network.neutron [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 11652422-9136-4453-b932-06695f9bc910] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 807.041178] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.793s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 807.043197] env[63345]: DEBUG nova.compute.manager [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Start building networks asynchronously for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 807.046866] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.693s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 807.051473] env[63345]: INFO nova.compute.claims [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: a85688b0-d68f-4370-bd95-dc9fb1d2c26a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 807.076314] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 4f108dcc-c130-4c3f-840d-7a912150db3f] Preparing fetch location {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 807.076314] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 4f108dcc-c130-4c3f-840d-7a912150db3f] Fetch image to [datastore2] OSTACK_IMG_fe7cd087-e654-4d71-a059-e388f586fc9a/OSTACK_IMG_fe7cd087-e654-4d71-a059-e388f586fc9a.vmdk {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 807.076314] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 4f108dcc-c130-4c3f-840d-7a912150db3f] Downloading stream optimized image f0cb364f-cc7f-4213-88ce-b8773612e90e to [datastore2] OSTACK_IMG_fe7cd087-e654-4d71-a059-e388f586fc9a/OSTACK_IMG_fe7cd087-e654-4d71-a059-e388f586fc9a.vmdk on the data store datastore2 as vApp {{(pid=63345) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 807.076314] env[63345]: DEBUG nova.virt.vmwareapi.images [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 4f108dcc-c130-4c3f-840d-7a912150db3f] Downloading image file data f0cb364f-cc7f-4213-88ce-b8773612e90e to the ESX as VM named 'OSTACK_IMG_fe7cd087-e654-4d71-a059-e388f586fc9a' {{(pid=63345) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 807.172652] env[63345]: DEBUG oslo_vmware.rw_handles [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 807.172652] env[63345]: value = "resgroup-9" [ 807.172652] env[63345]: _type = "ResourcePool" [ 807.172652] env[63345]: }. 
{{(pid=63345) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 807.172910] env[63345]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-58582517-7ce5-48e2-a885-fcdaa72f7e2e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.197801] env[63345]: DEBUG oslo_vmware.rw_handles [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Lease: (returnval){ [ 807.197801] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]529610ac-b784-d196-a83f-d6780debcb63" [ 807.197801] env[63345]: _type = "HttpNfcLease" [ 807.197801] env[63345]: } obtained for vApp import into resource pool (val){ [ 807.197801] env[63345]: value = "resgroup-9" [ 807.197801] env[63345]: _type = "ResourcePool" [ 807.197801] env[63345]: }. {{(pid=63345) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 807.198073] env[63345]: DEBUG oslo_vmware.api [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Waiting for the lease: (returnval){ [ 807.198073] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]529610ac-b784-d196-a83f-d6780debcb63" [ 807.198073] env[63345]: _type = "HttpNfcLease" [ 807.198073] env[63345]: } to be ready. {{(pid=63345) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 807.205205] env[63345]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 807.205205] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]529610ac-b784-d196-a83f-d6780debcb63" [ 807.205205] env[63345]: _type = "HttpNfcLease" [ 807.205205] env[63345]: } is initializing. {{(pid=63345) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 807.223555] env[63345]: DEBUG nova.network.neutron [req-ca6ea5ae-739b-4d7e-ac59-e15a8a15cb33 req-27376b58-bef9-4b9a-a8e1-c4eed21c2f46 service nova] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Updated VIF entry in instance network info cache for port c6c991f3-51b9-4502-af97-3ca846db3c73. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 807.223922] env[63345]: DEBUG nova.network.neutron [req-ca6ea5ae-739b-4d7e-ac59-e15a8a15cb33 req-27376b58-bef9-4b9a-a8e1-c4eed21c2f46 service nova] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Updating instance_info_cache with network_info: [{"id": "c6c991f3-51b9-4502-af97-3ca846db3c73", "address": "fa:16:3e:87:40:da", "network": {"id": "e38fba0e-9c96-4a09-b0f0-08546e52eba6", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-944344960-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "307c1bea8f6d47ddb4d5ebac8bba25ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "721e64ee-fc02-4eb5-9c8c-ea55647a1b92", "external-id": "nsx-vlan-transportzone-621", "segmentation_id": 621, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc6c991f3-51", "ovs_interfaceid": "c6c991f3-51b9-4502-af97-3ca846db3c73", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 807.225309] env[63345]: INFO nova.compute.manager [-] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] Took 2.40 seconds to deallocate network for instance. [ 807.252267] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 5ef55aca-0714-4b34-85f2-b6d53f97c2d0] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 807.549010] env[63345]: DEBUG nova.network.neutron [req-14cd0bfb-b903-4830-bd93-e535eadb2972 req-0bd1ad93-ed5d-489a-821a-c6267a46daf5 service nova] [instance: 4f108dcc-c130-4c3f-840d-7a912150db3f] Updated VIF entry in instance network info cache for port f14abadb-834f-4695-87e7-c79a8d8b328e. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 807.549419] env[63345]: DEBUG nova.network.neutron [req-14cd0bfb-b903-4830-bd93-e535eadb2972 req-0bd1ad93-ed5d-489a-821a-c6267a46daf5 service nova] [instance: 4f108dcc-c130-4c3f-840d-7a912150db3f] Updating instance_info_cache with network_info: [{"id": "f14abadb-834f-4695-87e7-c79a8d8b328e", "address": "fa:16:3e:b4:8c:2c", "network": {"id": "6adcb593-15d5-4959-9e09-f7794e033f9e", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1117018512-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dfc1248fb5ee4f798b6c59154d4cf623", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "450939f7-f74b-41f7-93f7-b4fde6a6fbed", "external-id": "nsx-vlan-transportzone-866", "segmentation_id": 866, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf14abadb-83", "ovs_interfaceid": "f14abadb-834f-4695-87e7-c79a8d8b328e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 807.568192] env[63345]: DEBUG nova.compute.utils [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 807.570045] env[63345]: DEBUG nova.compute.manager [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Allocating IP information in the background. {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 807.570288] env[63345]: DEBUG nova.network.neutron [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 807.579588] env[63345]: DEBUG nova.network.neutron [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 11652422-9136-4453-b932-06695f9bc910] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 807.674120] env[63345]: DEBUG nova.policy [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '15bc99129abe498abb7549a6578a68d3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ba08f64c26d245a8b8f2b52ea97c2f1a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 807.708078] env[63345]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 807.708078] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]529610ac-b784-d196-a83f-d6780debcb63" [ 807.708078] env[63345]: _type = "HttpNfcLease" [ 807.708078] env[63345]: } is initializing. {{(pid=63345) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 807.729023] env[63345]: DEBUG oslo_concurrency.lockutils [req-ca6ea5ae-739b-4d7e-ac59-e15a8a15cb33 req-27376b58-bef9-4b9a-a8e1-c4eed21c2f46 service nova] Releasing lock "refresh_cache-00c58889-75f7-4a4b-a5a3-a45723c1f495" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 807.733429] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8479a8c2-85cb-4f55-89bb-1d6962627a93 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 807.757214] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 51d6db80-9d1f-4e38-a564-f587474f6294] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 807.930817] env[63345]: DEBUG nova.network.neutron [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 11652422-9136-4453-b932-06695f9bc910] Updating instance_info_cache with network_info: [{"id": "71b7616d-5472-4d3c-a8ca-6984d7c70c12", "address": "fa:16:3e:ba:da:55", "network": {"id": "80bb8388-e130-46af-a4fc-1daea51d1bf5", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1343573007-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "964cee117b3c4601b3afe82a8bb9c23e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ddfb706a-add1-4e16-9ac4-d20b16a1df6d", "external-id": "nsx-vlan-transportzone-820", "segmentation_id": 820, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71b7616d-54", "ovs_interfaceid": 
"71b7616d-5472-4d3c-a8ca-6984d7c70c12", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 808.052786] env[63345]: DEBUG oslo_concurrency.lockutils [req-14cd0bfb-b903-4830-bd93-e535eadb2972 req-0bd1ad93-ed5d-489a-821a-c6267a46daf5 service nova] Releasing lock "refresh_cache-4f108dcc-c130-4c3f-840d-7a912150db3f" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 808.073943] env[63345]: DEBUG nova.compute.manager [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Start building block device mappings for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 808.208445] env[63345]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 808.208445] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]529610ac-b784-d196-a83f-d6780debcb63" [ 808.208445] env[63345]: _type = "HttpNfcLease" [ 808.208445] env[63345]: } is initializing. {{(pid=63345) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 808.263697] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 808.264118] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Cleaning up deleted instances with incomplete migration {{(pid=63345) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11595}} [ 808.316172] env[63345]: DEBUG nova.network.neutron [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Successfully created port: 025d1e18-19a3-43ce-9db9-1590137a5544 {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 808.436839] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Releasing lock "refresh_cache-11652422-9136-4453-b932-06695f9bc910" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 808.438410] env[63345]: DEBUG nova.compute.manager [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 11652422-9136-4453-b932-06695f9bc910] Instance network_info: |[{"id": "71b7616d-5472-4d3c-a8ca-6984d7c70c12", "address": "fa:16:3e:ba:da:55", "network": {"id": "80bb8388-e130-46af-a4fc-1daea51d1bf5", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1343573007-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, 
"dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "964cee117b3c4601b3afe82a8bb9c23e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ddfb706a-add1-4e16-9ac4-d20b16a1df6d", "external-id": "nsx-vlan-transportzone-820", "segmentation_id": 820, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71b7616d-54", "ovs_interfaceid": "71b7616d-5472-4d3c-a8ca-6984d7c70c12", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 808.438501] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 11652422-9136-4453-b932-06695f9bc910] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ba:da:55', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ddfb706a-add1-4e16-9ac4-d20b16a1df6d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '71b7616d-5472-4d3c-a8ca-6984d7c70c12', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 808.449206] env[63345]: DEBUG oslo.service.loopingcall [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 808.449486] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 11652422-9136-4453-b932-06695f9bc910] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 808.449719] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e5e17db3-adf5-4f5a-b83c-eec00c43c0f7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.476341] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 808.476341] env[63345]: value = "task-1017115" [ 808.476341] env[63345]: _type = "Task" [ 808.476341] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.485997] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017115, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.563078] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f29337fc-ed3f-422f-81e8-4f6d8c8c4e2d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.574399] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9166e487-7197-49b0-8254-8563b197e3be {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.611237] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-104f1120-694b-460d-93ba-ee965927c08b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.618864] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dab5c63-4b05-44fe-b079-2943b9cf304f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.634114] env[63345]: DEBUG nova.compute.provider_tree [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 808.686462] env[63345]: DEBUG nova.compute.manager [req-cfbded29-86cf-4bb6-91f4-88a1d189af64 req-95945bb4-5493-4b64-b67f-8622cc0a6529 service nova] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] Received event network-vif-deleted-989bf403-079f-46b9-ab79-c645cec393aa {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 808.686652] env[63345]: DEBUG nova.compute.manager [req-cfbded29-86cf-4bb6-91f4-88a1d189af64 req-95945bb4-5493-4b64-b67f-8622cc0a6529 service nova] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Received event network-changed-c6c991f3-51b9-4502-af97-3ca846db3c73 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 808.686832] env[63345]: DEBUG nova.compute.manager [req-cfbded29-86cf-4bb6-91f4-88a1d189af64 req-95945bb4-5493-4b64-b67f-8622cc0a6529 service nova] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Refreshing instance network info cache due to event network-changed-c6c991f3-51b9-4502-af97-3ca846db3c73. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 808.687140] env[63345]: DEBUG oslo_concurrency.lockutils [req-cfbded29-86cf-4bb6-91f4-88a1d189af64 req-95945bb4-5493-4b64-b67f-8622cc0a6529 service nova] Acquiring lock "refresh_cache-00c58889-75f7-4a4b-a5a3-a45723c1f495" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 808.688061] env[63345]: DEBUG oslo_concurrency.lockutils [req-cfbded29-86cf-4bb6-91f4-88a1d189af64 req-95945bb4-5493-4b64-b67f-8622cc0a6529 service nova] Acquired lock "refresh_cache-00c58889-75f7-4a4b-a5a3-a45723c1f495" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 808.688061] env[63345]: DEBUG nova.network.neutron [req-cfbded29-86cf-4bb6-91f4-88a1d189af64 req-95945bb4-5493-4b64-b67f-8622cc0a6529 service nova] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Refreshing network info cache for port c6c991f3-51b9-4502-af97-3ca846db3c73 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 808.710536] env[63345]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 808.710536] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]529610ac-b784-d196-a83f-d6780debcb63" [ 808.710536] env[63345]: _type = "HttpNfcLease" [ 808.710536] env[63345]: } is ready. {{(pid=63345) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 808.711833] env[63345]: DEBUG oslo_vmware.rw_handles [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 808.711833] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]529610ac-b784-d196-a83f-d6780debcb63" [ 808.711833] env[63345]: _type = "HttpNfcLease" [ 808.711833] env[63345]: }. 
{{(pid=63345) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 808.713011] env[63345]: DEBUG nova.compute.manager [req-89eb9f0d-fa9c-436e-9016-85c91111afcd req-99fba6bd-2303-44f0-9ee2-d6309fe9060f service nova] [instance: 11652422-9136-4453-b932-06695f9bc910] Received event network-vif-plugged-71b7616d-5472-4d3c-a8ca-6984d7c70c12 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 808.713313] env[63345]: DEBUG oslo_concurrency.lockutils [req-89eb9f0d-fa9c-436e-9016-85c91111afcd req-99fba6bd-2303-44f0-9ee2-d6309fe9060f service nova] Acquiring lock "11652422-9136-4453-b932-06695f9bc910-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 808.713670] env[63345]: DEBUG oslo_concurrency.lockutils [req-89eb9f0d-fa9c-436e-9016-85c91111afcd req-99fba6bd-2303-44f0-9ee2-d6309fe9060f service nova] Lock "11652422-9136-4453-b932-06695f9bc910-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 808.713986] env[63345]: DEBUG oslo_concurrency.lockutils [req-89eb9f0d-fa9c-436e-9016-85c91111afcd req-99fba6bd-2303-44f0-9ee2-d6309fe9060f service nova] Lock "11652422-9136-4453-b932-06695f9bc910-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 808.714314] env[63345]: DEBUG nova.compute.manager [req-89eb9f0d-fa9c-436e-9016-85c91111afcd req-99fba6bd-2303-44f0-9ee2-d6309fe9060f service nova] [instance: 11652422-9136-4453-b932-06695f9bc910] No waiting events found dispatching network-vif-plugged-71b7616d-5472-4d3c-a8ca-6984d7c70c12 {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 808.714598] env[63345]: WARNING nova.compute.manager [req-89eb9f0d-fa9c-436e-9016-85c91111afcd req-99fba6bd-2303-44f0-9ee2-d6309fe9060f service nova] [instance: 11652422-9136-4453-b932-06695f9bc910] Received unexpected event network-vif-plugged-71b7616d-5472-4d3c-a8ca-6984d7c70c12 for instance with vm_state building and task_state spawning. [ 808.714950] env[63345]: DEBUG nova.compute.manager [req-89eb9f0d-fa9c-436e-9016-85c91111afcd req-99fba6bd-2303-44f0-9ee2-d6309fe9060f service nova] [instance: 11652422-9136-4453-b932-06695f9bc910] Received event network-changed-71b7616d-5472-4d3c-a8ca-6984d7c70c12 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 808.715192] env[63345]: DEBUG nova.compute.manager [req-89eb9f0d-fa9c-436e-9016-85c91111afcd req-99fba6bd-2303-44f0-9ee2-d6309fe9060f service nova] [instance: 11652422-9136-4453-b932-06695f9bc910] Refreshing instance network info cache due to event network-changed-71b7616d-5472-4d3c-a8ca-6984d7c70c12. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 808.715446] env[63345]: DEBUG oslo_concurrency.lockutils [req-89eb9f0d-fa9c-436e-9016-85c91111afcd req-99fba6bd-2303-44f0-9ee2-d6309fe9060f service nova] Acquiring lock "refresh_cache-11652422-9136-4453-b932-06695f9bc910" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 808.715598] env[63345]: DEBUG oslo_concurrency.lockutils [req-89eb9f0d-fa9c-436e-9016-85c91111afcd req-99fba6bd-2303-44f0-9ee2-d6309fe9060f service nova] Acquired lock "refresh_cache-11652422-9136-4453-b932-06695f9bc910" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 808.716203] env[63345]: DEBUG nova.network.neutron [req-89eb9f0d-fa9c-436e-9016-85c91111afcd req-99fba6bd-2303-44f0-9ee2-d6309fe9060f service nova] [instance: 11652422-9136-4453-b932-06695f9bc910] Refreshing network info cache for port 71b7616d-5472-4d3c-a8ca-6984d7c70c12 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 808.718086] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22c3b2f2-895d-432c-9fe4-ec9a77a0c156 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.728675] env[63345]: DEBUG oslo_vmware.rw_handles [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/527456c8-ae3f-e835-fff1-526495feedf8/disk-0.vmdk from lease info. {{(pid=63345) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 808.728919] env[63345]: DEBUG oslo_vmware.rw_handles [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Creating HTTP connection to write to file with size = 21334016 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/527456c8-ae3f-e835-fff1-526495feedf8/disk-0.vmdk. {{(pid=63345) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 808.790911] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 808.798441] env[63345]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-8a15020c-f6dc-427c-8789-b539f580333d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.986035] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017115, 'name': CreateVM_Task, 'duration_secs': 0.375335} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.986035] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 11652422-9136-4453-b932-06695f9bc910] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 808.986670] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 808.986843] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 808.987188] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 808.988619] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-91ea8313-0e60-4573-accd-4908e1d77274 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.996026] env[63345]: DEBUG oslo_vmware.api [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Waiting for the task: (returnval){ [ 808.996026] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52f85776-b4ac-6db2-a43c-6ebe565688e6" [ 808.996026] env[63345]: _type = "Task" [ 808.996026] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.004173] env[63345]: DEBUG oslo_vmware.api [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52f85776-b4ac-6db2-a43c-6ebe565688e6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.054682] env[63345]: DEBUG oslo_concurrency.lockutils [None req-128f3b73-bb07-41af-8973-c75285c7dc1a tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Acquiring lock "fe3e2b2a-9583-482e-b69b-6c130801d7db" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 809.054997] env[63345]: DEBUG oslo_concurrency.lockutils [None req-128f3b73-bb07-41af-8973-c75285c7dc1a tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Lock "fe3e2b2a-9583-482e-b69b-6c130801d7db" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 809.055242] env[63345]: DEBUG oslo_concurrency.lockutils [None req-128f3b73-bb07-41af-8973-c75285c7dc1a tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Acquiring lock "fe3e2b2a-9583-482e-b69b-6c130801d7db-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 809.055466] env[63345]: DEBUG oslo_concurrency.lockutils [None req-128f3b73-bb07-41af-8973-c75285c7dc1a tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Lock "fe3e2b2a-9583-482e-b69b-6c130801d7db-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 809.055638] env[63345]: DEBUG oslo_concurrency.lockutils [None req-128f3b73-bb07-41af-8973-c75285c7dc1a tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Lock "fe3e2b2a-9583-482e-b69b-6c130801d7db-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 809.063017] env[63345]: INFO nova.compute.manager [None req-128f3b73-bb07-41af-8973-c75285c7dc1a tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: fe3e2b2a-9583-482e-b69b-6c130801d7db] Terminating instance [ 809.111963] env[63345]: DEBUG nova.compute.manager [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Start spawning the instance on the hypervisor. 
{{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 809.137348] env[63345]: DEBUG nova.scheduler.client.report [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 809.158932] env[63345]: DEBUG nova.virt.hardware [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 809.159210] env[63345]: DEBUG nova.virt.hardware [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 809.159375] env[63345]: DEBUG nova.virt.hardware [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 809.159567] env[63345]: DEBUG nova.virt.hardware [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 809.160966] env[63345]: DEBUG nova.virt.hardware [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 809.160966] env[63345]: DEBUG nova.virt.hardware [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 809.160966] env[63345]: DEBUG nova.virt.hardware [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 809.160966] env[63345]: DEBUG nova.virt.hardware [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 809.160966] env[63345]: DEBUG nova.virt.hardware [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 809.160966] env[63345]: DEBUG nova.virt.hardware [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 809.161761] env[63345]: DEBUG nova.virt.hardware [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 809.162020] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df3443b8-6ade-4c75-8f69-1020e771fa3e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.181421] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9aa11820-2a0f-44be-b338-940e66aacd24 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.508575] env[63345]: DEBUG oslo_vmware.api [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52f85776-b4ac-6db2-a43c-6ebe565688e6, 'name': SearchDatastore_Task, 'duration_secs': 0.054869} completed successfully. 
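The nova.virt.hardware lines above settle on a 1-socket, 1-core, 1-thread topology for the 1-vCPU m1.nano flavor because neither the flavor nor the image imposes limits. The snippet below is a simplified re-creation of that enumeration for illustration, not the real nova.virt.hardware code; the 65536 defaults are the maxima shown in the log.

    # Enumerate sockets*cores*threads factorizations of the vCPU count that
    # respect the (default 65536) per-dimension maxima, in the spirit of the
    # topology search logged above. Illustration only.
    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        topologies = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            for cores in range(1, min(vcpus // sockets, max_cores) + 1):
                if (vcpus // sockets) % cores:
                    continue
                threads = vcpus // sockets // cores
                if threads <= max_threads:
                    topologies.append((sockets, cores, threads))
        return topologies

    # A 1-vCPU flavor yields exactly one candidate, matching the log:
    print(possible_topologies(1))   # [(1, 1, 1)]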
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.508575] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 809.508654] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 11652422-9136-4453-b932-06695f9bc910] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 809.508937] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 809.509290] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 809.509555] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 809.509831] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f98a5885-e536-426d-a20b-44d5d17d38b6 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.534283] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 809.534554] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 809.535343] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a6caa85f-d16a-4de9-bf63-572a7a760bcc {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.543136] env[63345]: DEBUG oslo_vmware.api [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Waiting for the task: (returnval){ [ 809.543136] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]524326c1-25b2-5dc3-7e4f-d91bf902df5f" [ 809.543136] env[63345]: _type = "Task" [ 809.543136] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.554540] env[63345]: DEBUG oslo_vmware.api [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]524326c1-25b2-5dc3-7e4f-d91bf902df5f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.569670] env[63345]: DEBUG nova.compute.manager [None req-128f3b73-bb07-41af-8973-c75285c7dc1a tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: fe3e2b2a-9583-482e-b69b-6c130801d7db] Start destroying the instance on the hypervisor. {{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 809.570075] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-128f3b73-bb07-41af-8973-c75285c7dc1a tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: fe3e2b2a-9583-482e-b69b-6c130801d7db] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 809.574214] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c68056d-e856-4465-a0f1-d56d35595661 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.580035] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-128f3b73-bb07-41af-8973-c75285c7dc1a tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: fe3e2b2a-9583-482e-b69b-6c130801d7db] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 809.580337] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-36012540-13d9-4403-a1f0-e74d49424133 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.589415] env[63345]: DEBUG oslo_vmware.api [None req-128f3b73-bb07-41af-8973-c75285c7dc1a tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Waiting for the task: (returnval){ [ 809.589415] env[63345]: value = "task-1017116" [ 809.589415] env[63345]: _type = "Task" [ 809.589415] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.597558] env[63345]: DEBUG oslo_vmware.api [None req-128f3b73-bb07-41af-8973-c75285c7dc1a tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Task: {'id': task-1017116, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.645976] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.599s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 809.646679] env[63345]: DEBUG nova.compute.manager [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: a85688b0-d68f-4370-bd95-dc9fb1d2c26a] Start building networks asynchronously for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 809.651787] env[63345]: DEBUG oslo_vmware.rw_handles [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Completed reading data from the image iterator. {{(pid=63345) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 809.653149] env[63345]: DEBUG oslo_vmware.rw_handles [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/527456c8-ae3f-e835-fff1-526495feedf8/disk-0.vmdk. {{(pid=63345) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 809.653231] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.816s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 809.654959] env[63345]: INFO nova.compute.claims [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: df2f06af-54a6-4dbd-83ff-1e4b066acbf3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 809.659038] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57069a85-8d94-4b7c-81de-fcb4e3f7afa9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.666776] env[63345]: DEBUG oslo_vmware.rw_handles [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/527456c8-ae3f-e835-fff1-526495feedf8/disk-0.vmdk is in state: ready. 
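The rw_handles entries above describe streaming image bytes to the VMDK URL obtained from an HttpNfcLease, reporting progress so the lease stays alive, and then completing the lease once the data is written. Below is a rough sketch of that flow under stated assumptions: update_progress and complete_lease are hypothetical stand-ins for the HttpNfcLeaseProgress and HttpNfcLeaseComplete calls, and the plain requests.put upload is a simplification of the real write handle.

    # Stream image data to the ESX-hosted disk-0.vmdk URL from the lease,
    # ticking lease progress, then mark the lease complete. Illustrative only.
    import requests

    def upload_vmdk(url, image_chunks, total_size, update_progress, complete_lease):
        sent = 0

        def counted():
            nonlocal sent
            for chunk in image_chunks:
                sent += len(chunk)
                update_progress(int(sent * 100 / total_size))  # keeps the lease alive
                yield chunk

        resp = requests.put(url, data=counted(), verify=False, timeout=300)
        resp.raise_for_status()
        complete_lease()   # HttpNfcLeaseComplete once all bytes are written
        return sent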
{{(pid=63345) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 809.667025] env[63345]: DEBUG oslo_vmware.rw_handles [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/527456c8-ae3f-e835-fff1-526495feedf8/disk-0.vmdk. {{(pid=63345) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 809.667236] env[63345]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-d77c1e9e-411d-423d-ac6b-5590a4e8ecb4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.710332] env[63345]: DEBUG oslo_concurrency.lockutils [None req-50b04232-926d-4677-9129-41a8dd83ce1b tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Acquiring lock "00c58889-75f7-4a4b-a5a3-a45723c1f495" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 809.711023] env[63345]: DEBUG oslo_concurrency.lockutils [None req-50b04232-926d-4677-9129-41a8dd83ce1b tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Lock "00c58889-75f7-4a4b-a5a3-a45723c1f495" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 809.711023] env[63345]: DEBUG oslo_concurrency.lockutils [None req-50b04232-926d-4677-9129-41a8dd83ce1b tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Acquiring lock "00c58889-75f7-4a4b-a5a3-a45723c1f495-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 809.711897] env[63345]: DEBUG oslo_concurrency.lockutils [None req-50b04232-926d-4677-9129-41a8dd83ce1b tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Lock "00c58889-75f7-4a4b-a5a3-a45723c1f495-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 809.711897] env[63345]: DEBUG oslo_concurrency.lockutils [None req-50b04232-926d-4677-9129-41a8dd83ce1b tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Lock "00c58889-75f7-4a4b-a5a3-a45723c1f495-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 809.715013] env[63345]: INFO nova.compute.manager [None req-50b04232-926d-4677-9129-41a8dd83ce1b tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Terminating instance [ 809.857874] env[63345]: DEBUG nova.network.neutron [req-cfbded29-86cf-4bb6-91f4-88a1d189af64 
req-95945bb4-5493-4b64-b67f-8622cc0a6529 service nova] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Updated VIF entry in instance network info cache for port c6c991f3-51b9-4502-af97-3ca846db3c73. {{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 809.858276] env[63345]: DEBUG nova.network.neutron [req-cfbded29-86cf-4bb6-91f4-88a1d189af64 req-95945bb4-5493-4b64-b67f-8622cc0a6529 service nova] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Updating instance_info_cache with network_info: [{"id": "c6c991f3-51b9-4502-af97-3ca846db3c73", "address": "fa:16:3e:87:40:da", "network": {"id": "e38fba0e-9c96-4a09-b0f0-08546e52eba6", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-944344960-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "307c1bea8f6d47ddb4d5ebac8bba25ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "721e64ee-fc02-4eb5-9c8c-ea55647a1b92", "external-id": "nsx-vlan-transportzone-621", "segmentation_id": 621, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc6c991f3-51", "ovs_interfaceid": "c6c991f3-51b9-4502-af97-3ca846db3c73", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 809.868394] env[63345]: DEBUG oslo_vmware.rw_handles [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/527456c8-ae3f-e835-fff1-526495feedf8/disk-0.vmdk. {{(pid=63345) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 809.868636] env[63345]: INFO nova.virt.vmwareapi.images [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 4f108dcc-c130-4c3f-840d-7a912150db3f] Downloaded image file data f0cb364f-cc7f-4213-88ce-b8773612e90e [ 809.869618] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6df7c4a-4d46-4f7f-a23d-7bf73e0462e0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.873291] env[63345]: DEBUG nova.network.neutron [req-89eb9f0d-fa9c-436e-9016-85c91111afcd req-99fba6bd-2303-44f0-9ee2-d6309fe9060f service nova] [instance: 11652422-9136-4453-b932-06695f9bc910] Updated VIF entry in instance network info cache for port 71b7616d-5472-4d3c-a8ca-6984d7c70c12. 
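The instance_info_cache payload above is a list of VIF dictionaries. The helper below pulls out the fields that recur later in the log (fixed IP, MAC, devname, OVS interface id); it is an illustration over the structure shown here, not Nova's NetworkInfo model, and the sample dict is trimmed to the relevant keys.

    # Extract the commonly used fields from a VIF entry shaped like the
    # instance_info_cache payload above (illustrative only).
    def summarize_vif(vif):
        fixed_ips = [
            ip["address"]
            for subnet in vif["network"]["subnets"]
            for ip in subnet["ips"]
        ]
        return {
            "port_id": vif["id"],
            "mac": vif["address"],
            "devname": vif["devname"],
            "ovs_interfaceid": vif.get("ovs_interfaceid"),
            "fixed_ips": fixed_ips,
        }

    vif = {
        "id": "c6c991f3-51b9-4502-af97-3ca846db3c73",
        "address": "fa:16:3e:87:40:da",
        "devname": "tapc6c991f3-51",
        "ovs_interfaceid": "c6c991f3-51b9-4502-af97-3ca846db3c73",
        "network": {"subnets": [{"ips": [{"address": "192.168.128.6"}]}]},
    }
    print(summarize_vif(vif)["fixed_ips"])   # ['192.168.128.6']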
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 809.873668] env[63345]: DEBUG nova.network.neutron [req-89eb9f0d-fa9c-436e-9016-85c91111afcd req-99fba6bd-2303-44f0-9ee2-d6309fe9060f service nova] [instance: 11652422-9136-4453-b932-06695f9bc910] Updating instance_info_cache with network_info: [{"id": "71b7616d-5472-4d3c-a8ca-6984d7c70c12", "address": "fa:16:3e:ba:da:55", "network": {"id": "80bb8388-e130-46af-a4fc-1daea51d1bf5", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1343573007-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "964cee117b3c4601b3afe82a8bb9c23e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ddfb706a-add1-4e16-9ac4-d20b16a1df6d", "external-id": "nsx-vlan-transportzone-820", "segmentation_id": 820, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71b7616d-54", "ovs_interfaceid": "71b7616d-5472-4d3c-a8ca-6984d7c70c12", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 809.889566] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b4163884-4876-46dd-aadd-2c132cbea420 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.925928] env[63345]: INFO nova.virt.vmwareapi.images [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 4f108dcc-c130-4c3f-840d-7a912150db3f] The imported VM was unregistered [ 809.928357] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 4f108dcc-c130-4c3f-840d-7a912150db3f] Caching image {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 809.928590] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Creating directory with path [datastore2] devstack-image-cache_base/f0cb364f-cc7f-4213-88ce-b8773612e90e {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 809.928939] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fac12125-40d4-41bf-9eb3-5f451112dc85 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.938301] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Created directory with path [datastore2] devstack-image-cache_base/f0cb364f-cc7f-4213-88ce-b8773612e90e {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 809.938490] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_fe7cd087-e654-4d71-a059-e388f586fc9a/OSTACK_IMG_fe7cd087-e654-4d71-a059-e388f586fc9a.vmdk to [datastore2] devstack-image-cache_base/f0cb364f-cc7f-4213-88ce-b8773612e90e/f0cb364f-cc7f-4213-88ce-b8773612e90e.vmdk. {{(pid=63345) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 809.938725] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-19b7c196-2201-4be4-a525-4b70f4ee52f8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.945321] env[63345]: DEBUG oslo_vmware.api [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Waiting for the task: (returnval){ [ 809.945321] env[63345]: value = "task-1017118" [ 809.945321] env[63345]: _type = "Task" [ 809.945321] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.952890] env[63345]: DEBUG oslo_vmware.api [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1017118, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.056018] env[63345]: DEBUG oslo_vmware.api [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]524326c1-25b2-5dc3-7e4f-d91bf902df5f, 'name': SearchDatastore_Task, 'duration_secs': 0.037561} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.056018] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3bc13721-068b-4c65-af59-0c562b473899 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.060654] env[63345]: DEBUG oslo_vmware.api [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Waiting for the task: (returnval){ [ 810.060654] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]5241edce-d729-5f43-2e56-06cfb6e229c1" [ 810.060654] env[63345]: _type = "Task" [ 810.060654] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.072456] env[63345]: DEBUG oslo_vmware.api [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5241edce-d729-5f43-2e56-06cfb6e229c1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.099986] env[63345]: DEBUG oslo_vmware.api [None req-128f3b73-bb07-41af-8973-c75285c7dc1a tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Task: {'id': task-1017116, 'name': PowerOffVM_Task, 'duration_secs': 0.212851} completed successfully. 
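For instance fe3e2b2a-9583-482e-b69b-6c130801d7db the teardown proceeds as power-off, unregister, then deletion of the instance's datastore directory, each backed by a vCenter call. The sketch below captures only that ordering; FakeSession and its methods are hypothetical placeholders, and the VM reference is made up.

    # Teardown order visible in the log: PowerOffVM_Task, UnregisterVM,
    # then DeleteDatastoreFile_Task on the instance directory.
    class FakeSession:
        """Stand-in for the real VMware session wrapper (illustrative only)."""
        def power_off(self, vm_ref):
            print(f"PowerOffVM_Task on {vm_ref}")
        def unregister(self, vm_ref):
            print(f"UnregisterVM on {vm_ref}")
        def delete_path(self, path):
            print(f"DeleteDatastoreFile_Task on {path}")

    def destroy_instance(session, vm_ref, datastore_path):
        session.power_off(vm_ref)
        session.unregister(vm_ref)
        session.delete_path(datastore_path)

    destroy_instance(FakeSession(), "vm-123",
                     "[datastore1] fe3e2b2a-9583-482e-b69b-6c130801d7db")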
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.106555] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-128f3b73-bb07-41af-8973-c75285c7dc1a tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: fe3e2b2a-9583-482e-b69b-6c130801d7db] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 810.106861] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-128f3b73-bb07-41af-8973-c75285c7dc1a tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: fe3e2b2a-9583-482e-b69b-6c130801d7db] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 810.107096] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a0a15308-b6ce-450a-a176-a9d080968029 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.169538] env[63345]: DEBUG nova.compute.utils [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 810.169899] env[63345]: DEBUG nova.compute.manager [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: a85688b0-d68f-4370-bd95-dc9fb1d2c26a] Allocating IP information in the background. {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 810.170168] env[63345]: DEBUG nova.network.neutron [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: a85688b0-d68f-4370-bd95-dc9fb1d2c26a] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 810.178659] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-128f3b73-bb07-41af-8973-c75285c7dc1a tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: fe3e2b2a-9583-482e-b69b-6c130801d7db] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 810.178935] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-128f3b73-bb07-41af-8973-c75285c7dc1a tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: fe3e2b2a-9583-482e-b69b-6c130801d7db] Deleting contents of the VM from datastore datastore1 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 810.179164] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-128f3b73-bb07-41af-8973-c75285c7dc1a tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Deleting the datastore file [datastore1] fe3e2b2a-9583-482e-b69b-6c130801d7db {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 810.180167] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-44f9c45c-3d93-4ef2-b361-f9de74014c40 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.187043] env[63345]: DEBUG oslo_vmware.api [None 
req-128f3b73-bb07-41af-8973-c75285c7dc1a tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Waiting for the task: (returnval){ [ 810.187043] env[63345]: value = "task-1017120" [ 810.187043] env[63345]: _type = "Task" [ 810.187043] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.195752] env[63345]: DEBUG oslo_vmware.api [None req-128f3b73-bb07-41af-8973-c75285c7dc1a tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Task: {'id': task-1017120, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.222042] env[63345]: DEBUG nova.compute.manager [None req-50b04232-926d-4677-9129-41a8dd83ce1b tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Start destroying the instance on the hypervisor. {{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 810.222748] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-50b04232-926d-4677-9129-41a8dd83ce1b tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 810.223322] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7697c0f2-76a7-4f94-877d-b1c02b8f515c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.243339] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-50b04232-926d-4677-9129-41a8dd83ce1b tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 810.243339] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6efde037-558b-4eac-b76a-c0fc0dd344ad {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.249846] env[63345]: DEBUG oslo_vmware.api [None req-50b04232-926d-4677-9129-41a8dd83ce1b tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Waiting for the task: (returnval){ [ 810.249846] env[63345]: value = "task-1017121" [ 810.249846] env[63345]: _type = "Task" [ 810.249846] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.251084] env[63345]: DEBUG nova.policy [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f522b76a59a649a0a8570a4e8b8da753', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2b389a73e7804452b23d8c00bedd0362', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 810.268040] env[63345]: DEBUG oslo_vmware.api [None req-50b04232-926d-4677-9129-41a8dd83ce1b tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Task: {'id': task-1017121, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.361578] env[63345]: DEBUG oslo_concurrency.lockutils [req-cfbded29-86cf-4bb6-91f4-88a1d189af64 req-95945bb4-5493-4b64-b67f-8622cc0a6529 service nova] Releasing lock "refresh_cache-00c58889-75f7-4a4b-a5a3-a45723c1f495" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 810.361891] env[63345]: DEBUG nova.compute.manager [req-cfbded29-86cf-4bb6-91f4-88a1d189af64 req-95945bb4-5493-4b64-b67f-8622cc0a6529 service nova] [instance: fe3e2b2a-9583-482e-b69b-6c130801d7db] Received event network-changed-5c61daf5-e16c-4171-8fbf-a8d0108d4a21 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 810.362147] env[63345]: DEBUG nova.compute.manager [req-cfbded29-86cf-4bb6-91f4-88a1d189af64 req-95945bb4-5493-4b64-b67f-8622cc0a6529 service nova] [instance: fe3e2b2a-9583-482e-b69b-6c130801d7db] Refreshing instance network info cache due to event network-changed-5c61daf5-e16c-4171-8fbf-a8d0108d4a21. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 810.362456] env[63345]: DEBUG oslo_concurrency.lockutils [req-cfbded29-86cf-4bb6-91f4-88a1d189af64 req-95945bb4-5493-4b64-b67f-8622cc0a6529 service nova] Acquiring lock "refresh_cache-fe3e2b2a-9583-482e-b69b-6c130801d7db" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 810.362604] env[63345]: DEBUG oslo_concurrency.lockutils [req-cfbded29-86cf-4bb6-91f4-88a1d189af64 req-95945bb4-5493-4b64-b67f-8622cc0a6529 service nova] Acquired lock "refresh_cache-fe3e2b2a-9583-482e-b69b-6c130801d7db" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 810.362776] env[63345]: DEBUG nova.network.neutron [req-cfbded29-86cf-4bb6-91f4-88a1d189af64 req-95945bb4-5493-4b64-b67f-8622cc0a6529 service nova] [instance: fe3e2b2a-9583-482e-b69b-6c130801d7db] Refreshing network info cache for port 5c61daf5-e16c-4171-8fbf-a8d0108d4a21 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 810.376993] env[63345]: DEBUG oslo_concurrency.lockutils [req-89eb9f0d-fa9c-436e-9016-85c91111afcd req-99fba6bd-2303-44f0-9ee2-d6309fe9060f service nova] Releasing lock "refresh_cache-11652422-9136-4453-b932-06695f9bc910" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 810.457464] env[63345]: DEBUG oslo_vmware.api [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1017118, 'name': MoveVirtualDisk_Task} progress is 24%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.571150] env[63345]: DEBUG oslo_vmware.api [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5241edce-d729-5f43-2e56-06cfb6e229c1, 'name': SearchDatastore_Task, 'duration_secs': 0.009062} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.571522] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 810.571738] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 11652422-9136-4453-b932-06695f9bc910/11652422-9136-4453-b932-06695f9bc910.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 810.572236] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-158fcfd9-b25b-433f-bf5b-68f23060077b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.579999] env[63345]: DEBUG oslo_vmware.api [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Waiting for the task: (returnval){ [ 810.579999] env[63345]: value = "task-1017122" [ 810.579999] env[63345]: _type = "Task" [ 810.579999] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.589044] env[63345]: DEBUG oslo_vmware.api [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017122, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.674139] env[63345]: DEBUG nova.compute.manager [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: a85688b0-d68f-4370-bd95-dc9fb1d2c26a] Start building block device mappings for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 810.701804] env[63345]: DEBUG oslo_vmware.api [None req-128f3b73-bb07-41af-8973-c75285c7dc1a tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Task: {'id': task-1017120, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.265257} completed successfully. 
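The sequence above resolves the cached image VMDK under devstack-image-cache_base and then copies it into the instance folder rather than downloading it again. A compact sketch of that cache-or-fetch-then-copy decision follows; cache_exists, fetch_image and copy_disk are hypothetical callables, and the path layout follows the datastore paths visible in this log.

    # Reuse the datastore image cache when possible, otherwise populate it,
    # then copy the cached VMDK to the instance folder. Illustration only.
    def ensure_root_disk(image_id, instance_uuid, datastore,
                         cache_exists, fetch_image, copy_disk):
        cache_path = f"[{datastore}] devstack-image-cache_base/{image_id}/{image_id}.vmdk"
        instance_path = f"[{datastore}] {instance_uuid}/{instance_uuid}.vmdk"
        if not cache_exists(cache_path):        # SearchDatastore_Task miss
            fetch_image(image_id, cache_path)   # download + move into the cache
        copy_disk(cache_path, instance_path)    # CopyVirtualDisk_Task
        return instance_path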
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.710398] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-128f3b73-bb07-41af-8973-c75285c7dc1a tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 810.710622] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-128f3b73-bb07-41af-8973-c75285c7dc1a tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: fe3e2b2a-9583-482e-b69b-6c130801d7db] Deleted contents of the VM from datastore datastore1 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 810.710898] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-128f3b73-bb07-41af-8973-c75285c7dc1a tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: fe3e2b2a-9583-482e-b69b-6c130801d7db] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 810.713509] env[63345]: INFO nova.compute.manager [None req-128f3b73-bb07-41af-8973-c75285c7dc1a tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: fe3e2b2a-9583-482e-b69b-6c130801d7db] Took 1.14 seconds to destroy the instance on the hypervisor. [ 810.713509] env[63345]: DEBUG oslo.service.loopingcall [None req-128f3b73-bb07-41af-8973-c75285c7dc1a tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 810.713509] env[63345]: DEBUG nova.compute.manager [-] [instance: fe3e2b2a-9583-482e-b69b-6c130801d7db] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 810.713509] env[63345]: DEBUG nova.network.neutron [-] [instance: fe3e2b2a-9583-482e-b69b-6c130801d7db] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 810.737505] env[63345]: DEBUG nova.compute.manager [req-68dd69aa-665d-4505-bdd6-2ed09b72f640 req-a0580669-a60a-424a-a7b4-19bdf2327f08 service nova] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Received event network-vif-plugged-025d1e18-19a3-43ce-9db9-1590137a5544 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 810.737933] env[63345]: DEBUG oslo_concurrency.lockutils [req-68dd69aa-665d-4505-bdd6-2ed09b72f640 req-a0580669-a60a-424a-a7b4-19bdf2327f08 service nova] Acquiring lock "9aa651b8-317d-4153-8c33-9df0a5d16115-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 810.738178] env[63345]: DEBUG oslo_concurrency.lockutils [req-68dd69aa-665d-4505-bdd6-2ed09b72f640 req-a0580669-a60a-424a-a7b4-19bdf2327f08 service nova] Lock "9aa651b8-317d-4153-8c33-9df0a5d16115-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 810.738395] env[63345]: DEBUG oslo_concurrency.lockutils [req-68dd69aa-665d-4505-bdd6-2ed09b72f640 req-a0580669-a60a-424a-a7b4-19bdf2327f08 service nova] Lock "9aa651b8-317d-4153-8c33-9df0a5d16115-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 810.738732] env[63345]: DEBUG nova.compute.manager [req-68dd69aa-665d-4505-bdd6-2ed09b72f640 req-a0580669-a60a-424a-a7b4-19bdf2327f08 service nova] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] No waiting events found dispatching network-vif-plugged-025d1e18-19a3-43ce-9db9-1590137a5544 {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 810.738732] env[63345]: WARNING nova.compute.manager [req-68dd69aa-665d-4505-bdd6-2ed09b72f640 req-a0580669-a60a-424a-a7b4-19bdf2327f08 service nova] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Received unexpected event network-vif-plugged-025d1e18-19a3-43ce-9db9-1590137a5544 for instance with vm_state building and task_state spawning. [ 810.765137] env[63345]: DEBUG oslo_vmware.api [None req-50b04232-926d-4677-9129-41a8dd83ce1b tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Task: {'id': task-1017121, 'name': PowerOffVM_Task, 'duration_secs': 0.242953} completed successfully. 
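The loopingcall wait on _deallocate_network_with_retries indicates the network deallocation is retried rather than attempted once. The generic retry loop below stands in for that behaviour; it is plain Python rather than the oslo.service helper Nova actually uses, and the attempt count and back-off are made-up illustration values.

    # Generic retry wrapper standing in for the retried network deallocation
    # the log waits on (illustrative values, not Nova's real settings).
    import time

    def deallocate_with_retries(deallocate, max_attempts=3, base_sleep=1.0):
        for attempt in range(1, max_attempts + 1):
            try:
                return deallocate()        # e.g. neutron deallocate_for_instance()
            except Exception:
                if attempt == max_attempts:
                    raise
                time.sleep(base_sleep * attempt)   # simple linear back-off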
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.768236] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-50b04232-926d-4677-9129-41a8dd83ce1b tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 810.768558] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-50b04232-926d-4677-9129-41a8dd83ce1b tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 810.769210] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7db418ed-6a47-4ca0-a2e8-7458c91163d4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.802737] env[63345]: DEBUG nova.network.neutron [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: a85688b0-d68f-4370-bd95-dc9fb1d2c26a] Successfully created port: e0d9c52f-00fe-4c7a-9301-348d5c2c56cf {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 810.869575] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-50b04232-926d-4677-9129-41a8dd83ce1b tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 810.869842] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-50b04232-926d-4677-9129-41a8dd83ce1b tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Deleting contents of the VM from datastore datastore1 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 810.870095] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-50b04232-926d-4677-9129-41a8dd83ce1b tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Deleting the datastore file [datastore1] 00c58889-75f7-4a4b-a5a3-a45723c1f495 {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 810.874695] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e5057401-3a12-4919-afc7-e7b1e7b6b2a1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.883433] env[63345]: DEBUG oslo_vmware.api [None req-50b04232-926d-4677-9129-41a8dd83ce1b tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Waiting for the task: (returnval){ [ 810.883433] env[63345]: value = "task-1017124" [ 810.883433] env[63345]: _type = "Task" [ 810.883433] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.892912] env[63345]: DEBUG oslo_vmware.api [None req-50b04232-926d-4677-9129-41a8dd83ce1b tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Task: {'id': task-1017124, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.962578] env[63345]: DEBUG oslo_vmware.api [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1017118, 'name': MoveVirtualDisk_Task} progress is 43%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.095231] env[63345]: DEBUG oslo_vmware.api [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017122, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.159347] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-442ee360-a5e9-41d1-9637-93e8f76d0d14 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.168222] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f5a3d7e-4f1f-4083-88fd-e389c1509b28 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.216408] env[63345]: DEBUG nova.network.neutron [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Successfully updated port: 025d1e18-19a3-43ce-9db9-1590137a5544 {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 811.217843] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ebd2ebf-05fc-4f94-9770-7360f08dff1c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.230536] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cae8e416-5e03-4331-922e-919f7c502b4a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.249283] env[63345]: DEBUG nova.compute.provider_tree [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 811.395426] env[63345]: DEBUG oslo_vmware.api [None req-50b04232-926d-4677-9129-41a8dd83ce1b tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Task: {'id': task-1017124, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.404077} completed successfully. 
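Given the inventory reported for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 earlier in this section, the capacity the scheduler can place against each resource class works out as (total - reserved) * allocation_ratio, the usual placement capacity calculation. The arithmetic on the logged numbers:

    # Capacity derived from the inventory logged for the provider:
    #   capacity = (total - reserved) * allocation_ratio
    inventory = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        print(rc, capacity)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0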
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.395701] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-50b04232-926d-4677-9129-41a8dd83ce1b tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 811.395897] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-50b04232-926d-4677-9129-41a8dd83ce1b tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Deleted contents of the VM from datastore datastore1 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 811.396189] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-50b04232-926d-4677-9129-41a8dd83ce1b tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 811.396385] env[63345]: INFO nova.compute.manager [None req-50b04232-926d-4677-9129-41a8dd83ce1b tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Took 1.17 seconds to destroy the instance on the hypervisor. [ 811.397046] env[63345]: DEBUG oslo.service.loopingcall [None req-50b04232-926d-4677-9129-41a8dd83ce1b tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 811.397128] env[63345]: DEBUG nova.compute.manager [-] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 811.397409] env[63345]: DEBUG nova.network.neutron [-] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 811.425638] env[63345]: DEBUG nova.compute.manager [req-b2698003-4e9b-46d2-a540-bf9e827e70a1 req-21bc0b57-5304-4281-90b9-6ec0ae703b8a service nova] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Received event network-changed-025d1e18-19a3-43ce-9db9-1590137a5544 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 811.425906] env[63345]: DEBUG nova.compute.manager [req-b2698003-4e9b-46d2-a540-bf9e827e70a1 req-21bc0b57-5304-4281-90b9-6ec0ae703b8a service nova] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Refreshing instance network info cache due to event network-changed-025d1e18-19a3-43ce-9db9-1590137a5544. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 811.426264] env[63345]: DEBUG oslo_concurrency.lockutils [req-b2698003-4e9b-46d2-a540-bf9e827e70a1 req-21bc0b57-5304-4281-90b9-6ec0ae703b8a service nova] Acquiring lock "refresh_cache-9aa651b8-317d-4153-8c33-9df0a5d16115" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 811.426425] env[63345]: DEBUG oslo_concurrency.lockutils [req-b2698003-4e9b-46d2-a540-bf9e827e70a1 req-21bc0b57-5304-4281-90b9-6ec0ae703b8a service nova] Acquired lock "refresh_cache-9aa651b8-317d-4153-8c33-9df0a5d16115" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 811.426674] env[63345]: DEBUG nova.network.neutron [req-b2698003-4e9b-46d2-a540-bf9e827e70a1 req-21bc0b57-5304-4281-90b9-6ec0ae703b8a service nova] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Refreshing network info cache for port 025d1e18-19a3-43ce-9db9-1590137a5544 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 811.463643] env[63345]: DEBUG oslo_vmware.api [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1017118, 'name': MoveVirtualDisk_Task} progress is 63%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.594308] env[63345]: DEBUG oslo_vmware.api [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017122, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.658316] env[63345]: DEBUG nova.network.neutron [req-cfbded29-86cf-4bb6-91f4-88a1d189af64 req-95945bb4-5493-4b64-b67f-8622cc0a6529 service nova] [instance: fe3e2b2a-9583-482e-b69b-6c130801d7db] Updated VIF entry in instance network info cache for port 5c61daf5-e16c-4171-8fbf-a8d0108d4a21. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 811.658767] env[63345]: DEBUG nova.network.neutron [req-cfbded29-86cf-4bb6-91f4-88a1d189af64 req-95945bb4-5493-4b64-b67f-8622cc0a6529 service nova] [instance: fe3e2b2a-9583-482e-b69b-6c130801d7db] Updating instance_info_cache with network_info: [{"id": "5c61daf5-e16c-4171-8fbf-a8d0108d4a21", "address": "fa:16:3e:5b:b7:f5", "network": {"id": "441f27c7-de99-494b-9db5-8e67e3c8e7b6", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-592603355-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8f0343855b6147f38b0cb3f2c72330e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5c61daf5-e1", "ovs_interfaceid": "5c61daf5-e16c-4171-8fbf-a8d0108d4a21", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 811.719456] env[63345]: DEBUG nova.compute.manager [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: a85688b0-d68f-4370-bd95-dc9fb1d2c26a] Start spawning the instance on the hypervisor. 
{{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 811.727470] env[63345]: DEBUG nova.compute.manager [req-99fcb6cd-21be-4f7e-b583-c8441c979be2 req-0d4c4984-1681-45b8-abad-fd50e713d579 service nova] [instance: fe3e2b2a-9583-482e-b69b-6c130801d7db] Received event network-vif-deleted-5c61daf5-e16c-4171-8fbf-a8d0108d4a21 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 811.727782] env[63345]: INFO nova.compute.manager [req-99fcb6cd-21be-4f7e-b583-c8441c979be2 req-0d4c4984-1681-45b8-abad-fd50e713d579 service nova] [instance: fe3e2b2a-9583-482e-b69b-6c130801d7db] Neutron deleted interface 5c61daf5-e16c-4171-8fbf-a8d0108d4a21; detaching it from the instance and deleting it from the info cache [ 811.727846] env[63345]: DEBUG nova.network.neutron [req-99fcb6cd-21be-4f7e-b583-c8441c979be2 req-0d4c4984-1681-45b8-abad-fd50e713d579 service nova] [instance: fe3e2b2a-9583-482e-b69b-6c130801d7db] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 811.729779] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Acquiring lock "refresh_cache-9aa651b8-317d-4153-8c33-9df0a5d16115" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 811.754406] env[63345]: DEBUG nova.scheduler.client.report [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 811.864450] env[63345]: DEBUG nova.network.neutron [-] [instance: fe3e2b2a-9583-482e-b69b-6c130801d7db] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 811.921600] env[63345]: DEBUG nova.virt.hardware [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 811.921600] env[63345]: DEBUG nova.virt.hardware [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 811.921600] env[63345]: DEBUG nova.virt.hardware [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 811.922038] env[63345]: DEBUG nova.virt.hardware [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 811.922038] env[63345]: DEBUG nova.virt.hardware [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 811.922195] env[63345]: DEBUG nova.virt.hardware [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 811.922427] env[63345]: DEBUG nova.virt.hardware [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 811.922596] env[63345]: DEBUG nova.virt.hardware [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 811.922772] env[63345]: DEBUG nova.virt.hardware [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 811.922941] env[63345]: DEBUG nova.virt.hardware [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 811.923137] env[63345]: DEBUG nova.virt.hardware [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 811.924492] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e01dc60e-ed82-41d7-a568-f70860f96b8e {{(pid=63345) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.935360] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36044bcb-4c93-472f-8e25-674d0db92cbd {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.962948] env[63345]: DEBUG oslo_vmware.api [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1017118, 'name': MoveVirtualDisk_Task} progress is 83%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.970454] env[63345]: DEBUG nova.network.neutron [req-b2698003-4e9b-46d2-a540-bf9e827e70a1 req-21bc0b57-5304-4281-90b9-6ec0ae703b8a service nova] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 812.074246] env[63345]: DEBUG nova.network.neutron [req-b2698003-4e9b-46d2-a540-bf9e827e70a1 req-21bc0b57-5304-4281-90b9-6ec0ae703b8a service nova] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 812.098386] env[63345]: DEBUG oslo_vmware.api [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017122, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.162956] env[63345]: DEBUG oslo_concurrency.lockutils [req-cfbded29-86cf-4bb6-91f4-88a1d189af64 req-95945bb4-5493-4b64-b67f-8622cc0a6529 service nova] Releasing lock "refresh_cache-fe3e2b2a-9583-482e-b69b-6c130801d7db" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 812.215427] env[63345]: DEBUG nova.network.neutron [-] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 812.233822] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c1dba907-1f56-4129-bd1b-d52b53daf347 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.243218] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ff906c9-71d4-47b1-9da5-4315b605692d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.263021] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.607s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 812.263021] env[63345]: DEBUG nova.compute.manager [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 
df2f06af-54a6-4dbd-83ff-1e4b066acbf3] Start building networks asynchronously for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 812.264627] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b20ae33d-c68b-47fa-966f-615a993f721c tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.613s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 812.265026] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b20ae33d-c68b-47fa-966f-615a993f721c tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 812.268381] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a99cbd47-599e-4848-9eb3-db28155cf89b tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.227s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 812.268749] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a99cbd47-599e-4848-9eb3-db28155cf89b tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 812.269068] env[63345]: INFO nova.compute.manager [None req-a99cbd47-599e-4848-9eb3-db28155cf89b tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Successfully reverted task state from rebuilding on failure for instance. [ 812.274520] env[63345]: DEBUG oslo_concurrency.lockutils [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.220s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 812.276478] env[63345]: INFO nova.compute.claims [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] [instance: 37f269fe-0266-4c03-9641-e6f43072657a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 812.294485] env[63345]: DEBUG nova.compute.manager [req-99fcb6cd-21be-4f7e-b583-c8441c979be2 req-0d4c4984-1681-45b8-abad-fd50e713d579 service nova] [instance: fe3e2b2a-9583-482e-b69b-6c130801d7db] Detach interface failed, port_id=5c61daf5-e16c-4171-8fbf-a8d0108d4a21, reason: Instance fe3e2b2a-9583-482e-b69b-6c130801d7db could not be found. 
{{(pid=63345) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 812.318887] env[63345]: INFO nova.scheduler.client.report [None req-b20ae33d-c68b-47fa-966f-615a993f721c tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Deleted allocations for instance bcec23fe-75c7-479e-9210-85ca6781d7e5 [ 812.371907] env[63345]: INFO nova.compute.manager [-] [instance: fe3e2b2a-9583-482e-b69b-6c130801d7db] Took 1.66 seconds to deallocate network for instance. [ 812.462696] env[63345]: DEBUG oslo_vmware.api [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1017118, 'name': MoveVirtualDisk_Task} progress is 100%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.582264] env[63345]: DEBUG oslo_concurrency.lockutils [req-b2698003-4e9b-46d2-a540-bf9e827e70a1 req-21bc0b57-5304-4281-90b9-6ec0ae703b8a service nova] Releasing lock "refresh_cache-9aa651b8-317d-4153-8c33-9df0a5d16115" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 812.582679] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Acquired lock "refresh_cache-9aa651b8-317d-4153-8c33-9df0a5d16115" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 812.582866] env[63345]: DEBUG nova.network.neutron [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 812.600672] env[63345]: DEBUG oslo_vmware.api [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017122, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.721936] env[63345]: INFO nova.compute.manager [-] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Took 1.32 seconds to deallocate network for instance. [ 812.782774] env[63345]: DEBUG nova.compute.utils [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 812.784952] env[63345]: DEBUG nova.compute.manager [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: df2f06af-54a6-4dbd-83ff-1e4b066acbf3] Allocating IP information in the background. 
{{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 812.785138] env[63345]: DEBUG nova.network.neutron [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: df2f06af-54a6-4dbd-83ff-1e4b066acbf3] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 812.833349] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b20ae33d-c68b-47fa-966f-615a993f721c tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Lock "bcec23fe-75c7-479e-9210-85ca6781d7e5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.250s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 812.865075] env[63345]: DEBUG nova.policy [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '620aa8aab5b7456e8d0feda8a3d9a225', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dc3b4aff33e540d79c796f98c315a05a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 812.878228] env[63345]: DEBUG oslo_concurrency.lockutils [None req-128f3b73-bb07-41af-8973-c75285c7dc1a tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 812.967424] env[63345]: DEBUG oslo_vmware.api [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1017118, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.700415} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.968596] env[63345]: INFO nova.virt.vmwareapi.ds_util [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_fe7cd087-e654-4d71-a059-e388f586fc9a/OSTACK_IMG_fe7cd087-e654-4d71-a059-e388f586fc9a.vmdk to [datastore2] devstack-image-cache_base/f0cb364f-cc7f-4213-88ce-b8773612e90e/f0cb364f-cc7f-4213-88ce-b8773612e90e.vmdk. 
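The recurring `Task: {'id': task-…} progress is N%` and `completed successfully` entries above (for `MoveVirtualDisk_Task`, `CopyVirtualDisk_Task`, `DeleteDatastoreFile_Task`) come from oslo.vmware's task poller, which re-reads the vCenter task's `info` on a fixed interval until it reaches a terminal state and then reports the duration. A minimal, simplified sketch of that loop is below; the `get_task_info` callable and its attributes are hypothetical stand-ins for the real PropertyCollector read of `Task.info`, and in oslo.vmware the loop runs inside an oslo.service looping call rather than a bare `sleep` loop.

```python
import time


def wait_for_task(get_task_info, poll_interval=0.5, log=print):
    """Poll a vCenter task until it reaches a terminal state.

    `get_task_info` is a hypothetical callable returning an object with
    `state` ('running', 'queued', 'success' or 'error'), `progress`
    (percent complete, possibly None) and `error_msg` attributes; the
    real code obtains these fields from Task.info via the vSphere
    PropertyCollector.
    """
    start = time.monotonic()
    while True:
        info = get_task_info()
        if info.state == 'success':
            # Matches the "completed successfully" entries with duration_secs.
            log("completed successfully, duration_secs=%.6f"
                % (time.monotonic() - start))
            return info
        if info.state == 'error':
            raise RuntimeError(info.error_msg)
        # Matches the intermediate "progress is N%" entries.
        log("progress is %s%%" % (info.progress or 0))
        time.sleep(poll_interval)
```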
[ 812.971016] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 4f108dcc-c130-4c3f-840d-7a912150db3f] Cleaning up location [datastore2] OSTACK_IMG_fe7cd087-e654-4d71-a059-e388f586fc9a {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 812.971016] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_fe7cd087-e654-4d71-a059-e388f586fc9a {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 812.971016] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a144587c-9b69-4660-b840-157b0ef34370 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.977697] env[63345]: DEBUG oslo_vmware.api [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Waiting for the task: (returnval){ [ 812.977697] env[63345]: value = "task-1017125" [ 812.977697] env[63345]: _type = "Task" [ 812.977697] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.992787] env[63345]: DEBUG oslo_vmware.api [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1017125, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.099652] env[63345]: DEBUG oslo_vmware.api [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017122, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.070561} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.099652] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 11652422-9136-4453-b932-06695f9bc910/11652422-9136-4453-b932-06695f9bc910.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 813.099861] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 11652422-9136-4453-b932-06695f9bc910] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 813.100408] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a63686eb-5b71-4ddb-87c7-30711a25fdc7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.110166] env[63345]: DEBUG oslo_vmware.api [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Waiting for the task: (returnval){ [ 813.110166] env[63345]: value = "task-1017126" [ 813.110166] env[63345]: _type = "Task" [ 813.110166] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.123221] env[63345]: DEBUG oslo_vmware.api [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017126, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.150034] env[63345]: DEBUG nova.network.neutron [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: a85688b0-d68f-4370-bd95-dc9fb1d2c26a] Successfully updated port: e0d9c52f-00fe-4c7a-9301-348d5c2c56cf {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 813.181570] env[63345]: DEBUG nova.network.neutron [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 813.233441] env[63345]: DEBUG oslo_concurrency.lockutils [None req-50b04232-926d-4677-9129-41a8dd83ce1b tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 813.288102] env[63345]: DEBUG nova.compute.manager [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: df2f06af-54a6-4dbd-83ff-1e4b066acbf3] Start building block device mappings for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 813.429348] env[63345]: DEBUG nova.network.neutron [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: df2f06af-54a6-4dbd-83ff-1e4b066acbf3] Successfully created port: 9682a3c3-bc5f-4d38-998f-d008a47b824e {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 813.488961] env[63345]: DEBUG oslo_vmware.api [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1017125, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.045902} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.491800] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 813.492204] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f0cb364f-cc7f-4213-88ce-b8773612e90e/f0cb364f-cc7f-4213-88ce-b8773612e90e.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 813.492703] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f0cb364f-cc7f-4213-88ce-b8773612e90e/f0cb364f-cc7f-4213-88ce-b8773612e90e.vmdk to [datastore2] 4f108dcc-c130-4c3f-840d-7a912150db3f/4f108dcc-c130-4c3f-840d-7a912150db3f.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 813.493437] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9dbac951-cfe1-4da9-a859-2b5e5ca3242c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.500583] env[63345]: DEBUG oslo_vmware.api [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Waiting for the task: (returnval){ [ 813.500583] env[63345]: value = "task-1017127" [ 813.500583] env[63345]: _type = "Task" [ 813.500583] env[63345]: } to 
complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.511583] env[63345]: DEBUG oslo_vmware.api [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1017127, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.558604] env[63345]: DEBUG nova.network.neutron [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Updating instance_info_cache with network_info: [{"id": "025d1e18-19a3-43ce-9db9-1590137a5544", "address": "fa:16:3e:9b:36:a9", "network": {"id": "95d95c9b-b21c-4ee5-ab54-d0bf2699d38e", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-88421441-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba08f64c26d245a8b8f2b52ea97c2f1a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7043ca7a-807c-4c7b-b646-23ffece188b2", "external-id": "nsx-vlan-transportzone-619", "segmentation_id": 619, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap025d1e18-19", "ovs_interfaceid": "025d1e18-19a3-43ce-9db9-1590137a5544", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 813.625711] env[63345]: DEBUG oslo_vmware.api [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017126, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069014} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.626775] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 11652422-9136-4453-b932-06695f9bc910] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 813.628106] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d74c185d-3c67-4719-b822-2e8f51c0628c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.657088] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 11652422-9136-4453-b932-06695f9bc910] Reconfiguring VM instance instance-00000041 to attach disk [datastore2] 11652422-9136-4453-b932-06695f9bc910/11652422-9136-4453-b932-06695f9bc910.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 813.660399] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Acquiring lock "refresh_cache-a85688b0-d68f-4370-bd95-dc9fb1d2c26a" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 813.660600] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Acquired lock "refresh_cache-a85688b0-d68f-4370-bd95-dc9fb1d2c26a" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 813.660762] env[63345]: DEBUG nova.network.neutron [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: a85688b0-d68f-4370-bd95-dc9fb1d2c26a] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 813.662306] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5b014007-1346-4693-acbd-0c92f1e29c15 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.686778] env[63345]: DEBUG oslo_vmware.api [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Waiting for the task: (returnval){ [ 813.686778] env[63345]: value = "task-1017128" [ 813.686778] env[63345]: _type = "Task" [ 813.686778] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.699742] env[63345]: DEBUG oslo_vmware.api [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017128, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.722027] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9caa8603-b152-4374-a8bb-b4842ae64e97 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.730463] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b735f90-44bc-437f-882c-cd8f2872bfe9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.767119] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66ee2df7-17af-4f0a-aab7-2e56ec1f3b67 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.775600] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-894558df-6492-46b8-a346-b6327ff78665 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.798222] env[63345]: DEBUG nova.compute.provider_tree [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 814.007441] env[63345]: DEBUG nova.compute.manager [req-c5fa5ed4-2b0a-43f6-8ff0-d4c1d06efb89 req-f80f8e5d-cb9e-4a19-bda8-a782fd8779db service nova] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Received event network-vif-deleted-c6c991f3-51b9-4502-af97-3ca846db3c73 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 814.007441] env[63345]: DEBUG nova.compute.manager [req-c5fa5ed4-2b0a-43f6-8ff0-d4c1d06efb89 req-f80f8e5d-cb9e-4a19-bda8-a782fd8779db service nova] [instance: a85688b0-d68f-4370-bd95-dc9fb1d2c26a] Received event network-vif-plugged-e0d9c52f-00fe-4c7a-9301-348d5c2c56cf {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 814.007441] env[63345]: DEBUG oslo_concurrency.lockutils [req-c5fa5ed4-2b0a-43f6-8ff0-d4c1d06efb89 req-f80f8e5d-cb9e-4a19-bda8-a782fd8779db service nova] Acquiring lock "a85688b0-d68f-4370-bd95-dc9fb1d2c26a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 814.007441] env[63345]: DEBUG oslo_concurrency.lockutils [req-c5fa5ed4-2b0a-43f6-8ff0-d4c1d06efb89 req-f80f8e5d-cb9e-4a19-bda8-a782fd8779db service nova] Lock "a85688b0-d68f-4370-bd95-dc9fb1d2c26a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 814.007441] env[63345]: DEBUG oslo_concurrency.lockutils [req-c5fa5ed4-2b0a-43f6-8ff0-d4c1d06efb89 
req-f80f8e5d-cb9e-4a19-bda8-a782fd8779db service nova] Lock "a85688b0-d68f-4370-bd95-dc9fb1d2c26a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 814.007441] env[63345]: DEBUG nova.compute.manager [req-c5fa5ed4-2b0a-43f6-8ff0-d4c1d06efb89 req-f80f8e5d-cb9e-4a19-bda8-a782fd8779db service nova] [instance: a85688b0-d68f-4370-bd95-dc9fb1d2c26a] No waiting events found dispatching network-vif-plugged-e0d9c52f-00fe-4c7a-9301-348d5c2c56cf {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 814.007441] env[63345]: WARNING nova.compute.manager [req-c5fa5ed4-2b0a-43f6-8ff0-d4c1d06efb89 req-f80f8e5d-cb9e-4a19-bda8-a782fd8779db service nova] [instance: a85688b0-d68f-4370-bd95-dc9fb1d2c26a] Received unexpected event network-vif-plugged-e0d9c52f-00fe-4c7a-9301-348d5c2c56cf for instance with vm_state building and task_state spawning. [ 814.007441] env[63345]: DEBUG nova.compute.manager [req-c5fa5ed4-2b0a-43f6-8ff0-d4c1d06efb89 req-f80f8e5d-cb9e-4a19-bda8-a782fd8779db service nova] [instance: a85688b0-d68f-4370-bd95-dc9fb1d2c26a] Received event network-changed-e0d9c52f-00fe-4c7a-9301-348d5c2c56cf {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 814.007441] env[63345]: DEBUG nova.compute.manager [req-c5fa5ed4-2b0a-43f6-8ff0-d4c1d06efb89 req-f80f8e5d-cb9e-4a19-bda8-a782fd8779db service nova] [instance: a85688b0-d68f-4370-bd95-dc9fb1d2c26a] Refreshing instance network info cache due to event network-changed-e0d9c52f-00fe-4c7a-9301-348d5c2c56cf. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 814.007441] env[63345]: DEBUG oslo_concurrency.lockutils [req-c5fa5ed4-2b0a-43f6-8ff0-d4c1d06efb89 req-f80f8e5d-cb9e-4a19-bda8-a782fd8779db service nova] Acquiring lock "refresh_cache-a85688b0-d68f-4370-bd95-dc9fb1d2c26a" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 814.019207] env[63345]: DEBUG oslo_vmware.api [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1017127, 'name': CopyVirtualDisk_Task} progress is 15%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.061812] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Releasing lock "refresh_cache-9aa651b8-317d-4153-8c33-9df0a5d16115" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 814.062346] env[63345]: DEBUG nova.compute.manager [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Instance network_info: |[{"id": "025d1e18-19a3-43ce-9db9-1590137a5544", "address": "fa:16:3e:9b:36:a9", "network": {"id": "95d95c9b-b21c-4ee5-ab54-d0bf2699d38e", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-88421441-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba08f64c26d245a8b8f2b52ea97c2f1a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7043ca7a-807c-4c7b-b646-23ffece188b2", "external-id": "nsx-vlan-transportzone-619", "segmentation_id": 619, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap025d1e18-19", "ovs_interfaceid": "025d1e18-19a3-43ce-9db9-1590137a5544", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 814.065264] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9b:36:a9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7043ca7a-807c-4c7b-b646-23ffece188b2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '025d1e18-19a3-43ce-9db9-1590137a5544', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 814.077581] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Creating folder: Project (ba08f64c26d245a8b8f2b52ea97c2f1a). Parent ref: group-v225918. 
{{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 814.078174] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1781d34f-40f1-4ba1-ab6d-f7bc85b819f8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.090943] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Created folder: Project (ba08f64c26d245a8b8f2b52ea97c2f1a) in parent group-v225918. [ 814.091825] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Creating folder: Instances. Parent ref: group-v226045. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 814.091825] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-133c0c41-0b67-4db9-a1ac-16ca8e7d0e0e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.101836] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Created folder: Instances in parent group-v226045. [ 814.102191] env[63345]: DEBUG oslo.service.loopingcall [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 814.102401] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 814.102671] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a18902aa-9d5d-4e73-99ee-6dd9447b9efd {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.119147] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a0001168-4e4f-4106-8211-41222142d8e8 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Acquiring lock "f043239f-7158-4199-a784-d711a5a301be" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 814.120061] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a0001168-4e4f-4106-8211-41222142d8e8 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Lock "f043239f-7158-4199-a784-d711a5a301be" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 814.120061] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a0001168-4e4f-4106-8211-41222142d8e8 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Acquiring lock "f043239f-7158-4199-a784-d711a5a301be-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 814.120061] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a0001168-4e4f-4106-8211-41222142d8e8 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Lock "f043239f-7158-4199-a784-d711a5a301be-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 814.120061] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a0001168-4e4f-4106-8211-41222142d8e8 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Lock "f043239f-7158-4199-a784-d711a5a301be-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 814.122694] env[63345]: INFO nova.compute.manager [None req-a0001168-4e4f-4106-8211-41222142d8e8 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] [instance: f043239f-7158-4199-a784-d711a5a301be] Terminating instance [ 814.130644] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 814.130644] env[63345]: value = "task-1017131" [ 814.130644] env[63345]: _type = "Task" [ 814.130644] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.142395] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017131, 'name': CreateVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.199434] env[63345]: DEBUG oslo_vmware.api [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017128, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.248694] env[63345]: DEBUG nova.network.neutron [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: a85688b0-d68f-4370-bd95-dc9fb1d2c26a] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 814.305356] env[63345]: DEBUG nova.compute.manager [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: df2f06af-54a6-4dbd-83ff-1e4b066acbf3] Start spawning the instance on the hypervisor. {{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 814.350407] env[63345]: DEBUG nova.virt.hardware [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 814.350407] env[63345]: DEBUG nova.virt.hardware [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 814.350769] env[63345]: DEBUG nova.virt.hardware [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 814.350848] env[63345]: DEBUG nova.virt.hardware [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 814.351037] env[63345]: 
DEBUG nova.virt.hardware [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 814.351232] env[63345]: DEBUG nova.virt.hardware [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 814.351502] env[63345]: DEBUG nova.virt.hardware [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 814.351692] env[63345]: DEBUG nova.virt.hardware [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 814.351967] env[63345]: DEBUG nova.virt.hardware [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 814.352174] env[63345]: DEBUG nova.virt.hardware [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 814.352366] env[63345]: DEBUG nova.virt.hardware [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 814.353402] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-292b23bd-93df-433c-a647-472275d5d688 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.358141] env[63345]: DEBUG nova.scheduler.client.report [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Updated inventory for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with generation 93 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:972}} [ 814.358415] env[63345]: DEBUG nova.compute.provider_tree [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Updating resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 generation from 93 to 94 during operation: update_inventory {{(pid=63345) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 814.358631] env[63345]: DEBUG nova.compute.provider_tree [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 814.369310] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bc2aabc-2945-49ee-863e-90d219fcbf78 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.514221] env[63345]: DEBUG oslo_vmware.api [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1017127, 'name': CopyVirtualDisk_Task} progress is 35%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.585472] env[63345]: DEBUG nova.network.neutron [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: a85688b0-d68f-4370-bd95-dc9fb1d2c26a] Updating instance_info_cache with network_info: [{"id": "e0d9c52f-00fe-4c7a-9301-348d5c2c56cf", "address": "fa:16:3e:f0:4e:4e", "network": {"id": "ce89b46a-97ec-4f2d-be39-333e9fcf307d", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-416012078-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2b389a73e7804452b23d8c00bedd0362", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bd3c6b64-aba2-4bdc-a693-3b4dff3ed861", "external-id": "nsx-vlan-transportzone-600", "segmentation_id": 600, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape0d9c52f-00", "ovs_interfaceid": "e0d9c52f-00fe-4c7a-9301-348d5c2c56cf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 814.629344] env[63345]: DEBUG nova.compute.manager [None req-a0001168-4e4f-4106-8211-41222142d8e8 tempest-ServersV294TestFqdnHostnames-438011618 
tempest-ServersV294TestFqdnHostnames-438011618-project-member] [instance: f043239f-7158-4199-a784-d711a5a301be] Start destroying the instance on the hypervisor. {{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 814.629654] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a0001168-4e4f-4106-8211-41222142d8e8 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] [instance: f043239f-7158-4199-a784-d711a5a301be] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 814.630724] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f25b752-e539-4f50-90e2-0e04f0c980a0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.653381] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0001168-4e4f-4106-8211-41222142d8e8 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] [instance: f043239f-7158-4199-a784-d711a5a301be] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 814.653493] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017131, 'name': CreateVM_Task} progress is 99%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.653736] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-43b0efd9-87dd-492a-b0e6-c788da0fe05a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.662315] env[63345]: DEBUG oslo_vmware.api [None req-a0001168-4e4f-4106-8211-41222142d8e8 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Waiting for the task: (returnval){ [ 814.662315] env[63345]: value = "task-1017132" [ 814.662315] env[63345]: _type = "Task" [ 814.662315] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.683071] env[63345]: DEBUG oslo_vmware.api [None req-a0001168-4e4f-4106-8211-41222142d8e8 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Task: {'id': task-1017132, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.705101] env[63345]: DEBUG oslo_vmware.api [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017128, 'name': ReconfigVM_Task, 'duration_secs': 0.785126} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.705650] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 11652422-9136-4453-b932-06695f9bc910] Reconfigured VM instance instance-00000041 to attach disk [datastore2] 11652422-9136-4453-b932-06695f9bc910/11652422-9136-4453-b932-06695f9bc910.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 814.706430] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3b6e12ff-5fac-41d0-92c9-2549221d7d5a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.715019] env[63345]: DEBUG oslo_vmware.api [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Waiting for the task: (returnval){ [ 814.715019] env[63345]: value = "task-1017133" [ 814.715019] env[63345]: _type = "Task" [ 814.715019] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.728702] env[63345]: DEBUG oslo_vmware.api [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017133, 'name': Rename_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.864935] env[63345]: DEBUG oslo_concurrency.lockutils [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.590s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 814.865976] env[63345]: DEBUG nova.compute.manager [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] [instance: 37f269fe-0266-4c03-9641-e6f43072657a] Start building networks asynchronously for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 814.869293] env[63345]: DEBUG oslo_concurrency.lockutils [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.396s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 814.870850] env[63345]: INFO nova.compute.claims [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 815.016175] env[63345]: DEBUG oslo_vmware.api [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1017127, 'name': CopyVirtualDisk_Task} progress is 54%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.087809] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Releasing lock "refresh_cache-a85688b0-d68f-4370-bd95-dc9fb1d2c26a" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 815.088199] env[63345]: DEBUG nova.compute.manager [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: a85688b0-d68f-4370-bd95-dc9fb1d2c26a] Instance network_info: |[{"id": "e0d9c52f-00fe-4c7a-9301-348d5c2c56cf", "address": "fa:16:3e:f0:4e:4e", "network": {"id": "ce89b46a-97ec-4f2d-be39-333e9fcf307d", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-416012078-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2b389a73e7804452b23d8c00bedd0362", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bd3c6b64-aba2-4bdc-a693-3b4dff3ed861", "external-id": "nsx-vlan-transportzone-600", "segmentation_id": 600, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape0d9c52f-00", "ovs_interfaceid": "e0d9c52f-00fe-4c7a-9301-348d5c2c56cf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 815.088550] env[63345]: DEBUG oslo_concurrency.lockutils [req-c5fa5ed4-2b0a-43f6-8ff0-d4c1d06efb89 req-f80f8e5d-cb9e-4a19-bda8-a782fd8779db service nova] Acquired lock "refresh_cache-a85688b0-d68f-4370-bd95-dc9fb1d2c26a" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 815.088745] env[63345]: DEBUG nova.network.neutron [req-c5fa5ed4-2b0a-43f6-8ff0-d4c1d06efb89 req-f80f8e5d-cb9e-4a19-bda8-a782fd8779db service nova] [instance: a85688b0-d68f-4370-bd95-dc9fb1d2c26a] Refreshing network info cache for port e0d9c52f-00fe-4c7a-9301-348d5c2c56cf {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 815.090046] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: a85688b0-d68f-4370-bd95-dc9fb1d2c26a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f0:4e:4e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bd3c6b64-aba2-4bdc-a693-3b4dff3ed861', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e0d9c52f-00fe-4c7a-9301-348d5c2c56cf', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 815.102703] env[63345]: DEBUG oslo.service.loopingcall [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Waiting for 
function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 815.103258] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a85688b0-d68f-4370-bd95-dc9fb1d2c26a] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 815.104111] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5c27e10b-7ac7-463c-a198-2fd38cdea152 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.130227] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 815.130227] env[63345]: value = "task-1017134" [ 815.130227] env[63345]: _type = "Task" [ 815.130227] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.143690] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017134, 'name': CreateVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.149441] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017131, 'name': CreateVM_Task, 'duration_secs': 0.61711} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.149575] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 815.150264] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 815.150598] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 815.150763] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 815.151134] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-54fc67cd-67a2-49f9-b9f0-68a2eefb418b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.158204] env[63345]: DEBUG oslo_vmware.api [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Waiting for the task: (returnval){ [ 815.158204] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]526d494c-d7e0-87af-5c11-4a32e66d10d4" [ 815.158204] env[63345]: _type 
= "Task" [ 815.158204] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.172562] env[63345]: DEBUG oslo_vmware.api [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]526d494c-d7e0-87af-5c11-4a32e66d10d4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.175859] env[63345]: DEBUG oslo_vmware.api [None req-a0001168-4e4f-4106-8211-41222142d8e8 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Task: {'id': task-1017132, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.231662] env[63345]: DEBUG oslo_vmware.api [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017133, 'name': Rename_Task, 'duration_secs': 0.36585} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.232024] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 11652422-9136-4453-b932-06695f9bc910] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 815.232327] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-96b26a75-c8ae-434d-aa9a-2b72331606a1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.241905] env[63345]: DEBUG oslo_vmware.api [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Waiting for the task: (returnval){ [ 815.241905] env[63345]: value = "task-1017135" [ 815.241905] env[63345]: _type = "Task" [ 815.241905] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.253500] env[63345]: DEBUG oslo_vmware.api [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017135, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.306183] env[63345]: DEBUG nova.network.neutron [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: df2f06af-54a6-4dbd-83ff-1e4b066acbf3] Successfully updated port: 9682a3c3-bc5f-4d38-998f-d008a47b824e {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 815.381076] env[63345]: DEBUG nova.compute.utils [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 815.386236] env[63345]: DEBUG nova.compute.manager [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] [instance: 37f269fe-0266-4c03-9641-e6f43072657a] Allocating IP information in the background. {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 815.386634] env[63345]: DEBUG nova.network.neutron [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] [instance: 37f269fe-0266-4c03-9641-e6f43072657a] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 815.469419] env[63345]: DEBUG nova.policy [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '349055b1592049cba7ea942b4eba0653', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5f090462fd22418691d9284bfa52c142', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 815.479596] env[63345]: DEBUG oslo_concurrency.lockutils [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Acquiring lock "a3f34e0e-2969-406f-a086-a925549e458e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 815.479870] env[63345]: DEBUG oslo_concurrency.lockutils [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Lock "a3f34e0e-2969-406f-a086-a925549e458e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 815.517153] env[63345]: DEBUG oslo_vmware.api [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1017127, 'name': CopyVirtualDisk_Task} progress is 74%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.648415] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017134, 'name': CreateVM_Task, 'duration_secs': 0.419917} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.653633] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a85688b0-d68f-4370-bd95-dc9fb1d2c26a] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 815.654928] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 815.682259] env[63345]: DEBUG oslo_vmware.api [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]526d494c-d7e0-87af-5c11-4a32e66d10d4, 'name': SearchDatastore_Task, 'duration_secs': 0.078432} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.690645] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 815.691306] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 815.692034] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 815.692121] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 815.693031] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 815.693184] env[63345]: 
DEBUG oslo_vmware.api [None req-a0001168-4e4f-4106-8211-41222142d8e8 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Task: {'id': task-1017132, 'name': PowerOffVM_Task, 'duration_secs': 0.938355} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.694257] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 815.694257] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 815.694888] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2d2c5db0-473c-4054-91ad-89268133aca3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.700192] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0001168-4e4f-4106-8211-41222142d8e8 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] [instance: f043239f-7158-4199-a784-d711a5a301be] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 815.700192] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a0001168-4e4f-4106-8211-41222142d8e8 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] [instance: f043239f-7158-4199-a784-d711a5a301be] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 815.700793] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-edb722b8-d35e-4dac-887c-b0f27bc3ea20 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.704015] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3bb3f7cf-52b8-4574-bca8-9e0f2e4643b8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.711940] env[63345]: DEBUG oslo_vmware.api [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Waiting for the task: (returnval){ [ 815.711940] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52cfe4d3-513e-397e-5b23-53a3402db947" [ 815.711940] env[63345]: _type = "Task" [ 815.711940] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.724424] env[63345]: DEBUG oslo_vmware.api [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52cfe4d3-513e-397e-5b23-53a3402db947, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.726029] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 815.726357] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 815.727346] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-464726f6-fdba-40f7-8c17-6c104f19ae3d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.735030] env[63345]: DEBUG oslo_vmware.api [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Waiting for the task: (returnval){ [ 815.735030] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]524b4cbe-d75b-eb39-0a49-06b7576cb02b" [ 815.735030] env[63345]: _type = "Task" [ 815.735030] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.747511] env[63345]: DEBUG oslo_vmware.api [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]524b4cbe-d75b-eb39-0a49-06b7576cb02b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.759977] env[63345]: DEBUG oslo_vmware.api [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017135, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.810367] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Acquiring lock "refresh_cache-df2f06af-54a6-4dbd-83ff-1e4b066acbf3" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 815.811522] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Acquired lock "refresh_cache-df2f06af-54a6-4dbd-83ff-1e4b066acbf3" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 815.811897] env[63345]: DEBUG nova.network.neutron [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: df2f06af-54a6-4dbd-83ff-1e4b066acbf3] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 815.826417] env[63345]: DEBUG nova.network.neutron [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] [instance: 37f269fe-0266-4c03-9641-e6f43072657a] Successfully created port: af759cda-f432-45a8-afdd-ead0d3533779 {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 815.888076] env[63345]: DEBUG nova.compute.manager [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] [instance: 37f269fe-0266-4c03-9641-e6f43072657a] Start building block device mappings for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 815.899452] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquiring lock "1e349d03-6cae-4322-9941-d48c52c21c0e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 815.899673] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lock "1e349d03-6cae-4322-9941-d48c52c21c0e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 816.015069] env[63345]: DEBUG oslo_vmware.api [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1017127, 'name': CopyVirtualDisk_Task} progress is 94%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.028528] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a0001168-4e4f-4106-8211-41222142d8e8 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] [instance: f043239f-7158-4199-a784-d711a5a301be] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 816.028888] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a0001168-4e4f-4106-8211-41222142d8e8 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] [instance: f043239f-7158-4199-a784-d711a5a301be] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 816.029453] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0001168-4e4f-4106-8211-41222142d8e8 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Deleting the datastore file [datastore2] f043239f-7158-4199-a784-d711a5a301be {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 816.029575] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d4f8cfa8-a86f-476a-9996-ef4b8f2823b3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.041195] env[63345]: DEBUG oslo_vmware.api [None req-a0001168-4e4f-4106-8211-41222142d8e8 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Waiting for the task: (returnval){ [ 816.041195] env[63345]: value = "task-1017137" [ 816.041195] env[63345]: _type = "Task" [ 816.041195] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.052689] env[63345]: DEBUG oslo_vmware.api [None req-a0001168-4e4f-4106-8211-41222142d8e8 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Task: {'id': task-1017137, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.097033] env[63345]: DEBUG nova.network.neutron [req-c5fa5ed4-2b0a-43f6-8ff0-d4c1d06efb89 req-f80f8e5d-cb9e-4a19-bda8-a782fd8779db service nova] [instance: a85688b0-d68f-4370-bd95-dc9fb1d2c26a] Updated VIF entry in instance network info cache for port e0d9c52f-00fe-4c7a-9301-348d5c2c56cf. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 816.097615] env[63345]: DEBUG nova.network.neutron [req-c5fa5ed4-2b0a-43f6-8ff0-d4c1d06efb89 req-f80f8e5d-cb9e-4a19-bda8-a782fd8779db service nova] [instance: a85688b0-d68f-4370-bd95-dc9fb1d2c26a] Updating instance_info_cache with network_info: [{"id": "e0d9c52f-00fe-4c7a-9301-348d5c2c56cf", "address": "fa:16:3e:f0:4e:4e", "network": {"id": "ce89b46a-97ec-4f2d-be39-333e9fcf307d", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-416012078-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2b389a73e7804452b23d8c00bedd0362", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bd3c6b64-aba2-4bdc-a693-3b4dff3ed861", "external-id": "nsx-vlan-transportzone-600", "segmentation_id": 600, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape0d9c52f-00", "ovs_interfaceid": "e0d9c52f-00fe-4c7a-9301-348d5c2c56cf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 816.224976] env[63345]: DEBUG oslo_vmware.api [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52cfe4d3-513e-397e-5b23-53a3402db947, 'name': SearchDatastore_Task, 'duration_secs': 0.089371} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.228188] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 816.228452] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: a85688b0-d68f-4370-bd95-dc9fb1d2c26a] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 816.228665] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 816.244202] env[63345]: DEBUG oslo_vmware.api [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]524b4cbe-d75b-eb39-0a49-06b7576cb02b, 'name': SearchDatastore_Task, 'duration_secs': 0.093277} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.248288] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d0b716e6-3d40-4ccb-b001-c44cf840f22a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.261391] env[63345]: DEBUG oslo_vmware.api [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017135, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.265877] env[63345]: DEBUG oslo_vmware.api [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Waiting for the task: (returnval){ [ 816.265877] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52964fbd-8fae-8587-3c78-7d4a47ca617c" [ 816.265877] env[63345]: _type = "Task" [ 816.265877] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.275700] env[63345]: DEBUG oslo_vmware.api [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52964fbd-8fae-8587-3c78-7d4a47ca617c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.347787] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2c354df-efbd-432d-99fa-12925adf77d0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.361023] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3ad451a-140e-4aee-9ea8-e386545fda51 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.391692] env[63345]: DEBUG nova.network.neutron [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: df2f06af-54a6-4dbd-83ff-1e4b066acbf3] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 816.394212] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa8e5e22-e76a-46fc-bdd8-59f92a0b79d1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.408026] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad87ee7e-dade-4af6-916c-71877032c9db {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.422933] env[63345]: DEBUG nova.compute.provider_tree [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 816.452499] env[63345]: DEBUG nova.compute.manager [req-8b382a5f-45a6-4f17-8669-8a352a6e165e req-ae5cb4b0-b7dd-4517-9fde-17d00e687b2e service nova] [instance: df2f06af-54a6-4dbd-83ff-1e4b066acbf3] Received event network-vif-plugged-9682a3c3-bc5f-4d38-998f-d008a47b824e {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 816.452731] env[63345]: DEBUG oslo_concurrency.lockutils [req-8b382a5f-45a6-4f17-8669-8a352a6e165e req-ae5cb4b0-b7dd-4517-9fde-17d00e687b2e service nova] Acquiring lock "df2f06af-54a6-4dbd-83ff-1e4b066acbf3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 816.454236] env[63345]: DEBUG oslo_concurrency.lockutils [req-8b382a5f-45a6-4f17-8669-8a352a6e165e req-ae5cb4b0-b7dd-4517-9fde-17d00e687b2e service nova] Lock "df2f06af-54a6-4dbd-83ff-1e4b066acbf3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 816.454236] env[63345]: DEBUG oslo_concurrency.lockutils [req-8b382a5f-45a6-4f17-8669-8a352a6e165e req-ae5cb4b0-b7dd-4517-9fde-17d00e687b2e service nova] Lock "df2f06af-54a6-4dbd-83ff-1e4b066acbf3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 816.454236] 
env[63345]: DEBUG nova.compute.manager [req-8b382a5f-45a6-4f17-8669-8a352a6e165e req-ae5cb4b0-b7dd-4517-9fde-17d00e687b2e service nova] [instance: df2f06af-54a6-4dbd-83ff-1e4b066acbf3] No waiting events found dispatching network-vif-plugged-9682a3c3-bc5f-4d38-998f-d008a47b824e {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 816.454236] env[63345]: WARNING nova.compute.manager [req-8b382a5f-45a6-4f17-8669-8a352a6e165e req-ae5cb4b0-b7dd-4517-9fde-17d00e687b2e service nova] [instance: df2f06af-54a6-4dbd-83ff-1e4b066acbf3] Received unexpected event network-vif-plugged-9682a3c3-bc5f-4d38-998f-d008a47b824e for instance with vm_state building and task_state spawning. [ 816.454236] env[63345]: DEBUG nova.compute.manager [req-8b382a5f-45a6-4f17-8669-8a352a6e165e req-ae5cb4b0-b7dd-4517-9fde-17d00e687b2e service nova] [instance: df2f06af-54a6-4dbd-83ff-1e4b066acbf3] Received event network-changed-9682a3c3-bc5f-4d38-998f-d008a47b824e {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 816.454236] env[63345]: DEBUG nova.compute.manager [req-8b382a5f-45a6-4f17-8669-8a352a6e165e req-ae5cb4b0-b7dd-4517-9fde-17d00e687b2e service nova] [instance: df2f06af-54a6-4dbd-83ff-1e4b066acbf3] Refreshing instance network info cache due to event network-changed-9682a3c3-bc5f-4d38-998f-d008a47b824e. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 816.454236] env[63345]: DEBUG oslo_concurrency.lockutils [req-8b382a5f-45a6-4f17-8669-8a352a6e165e req-ae5cb4b0-b7dd-4517-9fde-17d00e687b2e service nova] Acquiring lock "refresh_cache-df2f06af-54a6-4dbd-83ff-1e4b066acbf3" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 816.516991] env[63345]: DEBUG oslo_vmware.api [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1017127, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.67753} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.519387] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f0cb364f-cc7f-4213-88ce-b8773612e90e/f0cb364f-cc7f-4213-88ce-b8773612e90e.vmdk to [datastore2] 4f108dcc-c130-4c3f-840d-7a912150db3f/4f108dcc-c130-4c3f-840d-7a912150db3f.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 816.520197] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b739811-e51c-46ca-ad68-a62892d8d049 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.548215] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 4f108dcc-c130-4c3f-840d-7a912150db3f] Reconfiguring VM instance instance-00000040 to attach disk [datastore2] 4f108dcc-c130-4c3f-840d-7a912150db3f/4f108dcc-c130-4c3f-840d-7a912150db3f.vmdk or device None with type streamOptimized {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 816.548594] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3de704e1-86e5-47dd-9649-4c5af8484f96 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.577685] env[63345]: DEBUG oslo_vmware.api [None req-a0001168-4e4f-4106-8211-41222142d8e8 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Task: {'id': task-1017137, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.430633} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.579043] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0001168-4e4f-4106-8211-41222142d8e8 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 816.579256] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a0001168-4e4f-4106-8211-41222142d8e8 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] [instance: f043239f-7158-4199-a784-d711a5a301be] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 816.579441] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a0001168-4e4f-4106-8211-41222142d8e8 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] [instance: f043239f-7158-4199-a784-d711a5a301be] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 816.579622] env[63345]: INFO nova.compute.manager [None req-a0001168-4e4f-4106-8211-41222142d8e8 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] [instance: f043239f-7158-4199-a784-d711a5a301be] Took 1.95 seconds to destroy the instance on the hypervisor. 
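The entries above trace the driver-side teardown of instance f043239f-7158-4199-a784-d711a5a301be: PowerOffVM_Task (task-1017132), the synchronous UnregisterVM call, and DeleteDatastoreFile_Task on "[datastore2] f043239f-7158-4199-a784-d711a5a301be" (task-1017137), all issued through the shared oslo.vmware session and its task polling. Below is a minimal sketch of that call pattern, assuming an already-connected oslo_vmware.api.VMwareAPISession named `session` plus pre-resolved `vm_ref`, `dc_ref` and datastore path; the `destroy_vm` helper is illustrative and is not Nova's actual vm_util/ds_util code.

```python
# Illustrative sketch of the power-off / unregister / delete-files sequence
# seen in the log above. Assumes `session` is a connected
# oslo_vmware.api.VMwareAPISession and `vm_ref`, `dc_ref` are managed object
# references already looked up elsewhere (hypothetical inputs).
from oslo_vmware import exceptions as vexc


def destroy_vm(session, vm_ref, dc_ref, ds_path):
    """Power off, unregister and remove the backing files of one VM."""
    vim = session.vim

    # PowerOffVM_Task -> wait for completion (mirrors task-1017132 above).
    try:
        task = session.invoke_api(vim, 'PowerOffVM_Task', vm_ref)
        session.wait_for_task(task)
    except vexc.VimException:
        # The VM may already be powered off; a real driver tolerates this.
        pass

    # UnregisterVM is synchronous, so there is no task to poll.
    session.invoke_api(vim, 'UnregisterVM', vm_ref)

    # DeleteDatastoreFile_Task removes the instance directory, e.g.
    # "[datastore2] f043239f-7158-4199-a784-d711a5a301be".
    file_manager = vim.service_content.fileManager
    task = session.invoke_api(vim, 'DeleteDatastoreFile_Task', file_manager,
                              name=ds_path, datacenter=dc_ref)
    session.wait_for_task(task)
```

Each invoke_api call corresponds to one "Invoking ..." entry in the log, and each wait_for_task to the subsequent "Waiting for the task"/"Task: ... progress" polling entries.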
[ 816.580831] env[63345]: DEBUG oslo.service.loopingcall [None req-a0001168-4e4f-4106-8211-41222142d8e8 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 816.580831] env[63345]: DEBUG oslo_vmware.api [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Waiting for the task: (returnval){ [ 816.580831] env[63345]: value = "task-1017138" [ 816.580831] env[63345]: _type = "Task" [ 816.580831] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.580831] env[63345]: DEBUG nova.compute.manager [-] [instance: f043239f-7158-4199-a784-d711a5a301be] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 816.580831] env[63345]: DEBUG nova.network.neutron [-] [instance: f043239f-7158-4199-a784-d711a5a301be] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 816.593916] env[63345]: DEBUG oslo_vmware.api [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1017138, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.600710] env[63345]: DEBUG oslo_concurrency.lockutils [req-c5fa5ed4-2b0a-43f6-8ff0-d4c1d06efb89 req-f80f8e5d-cb9e-4a19-bda8-a782fd8779db service nova] Releasing lock "refresh_cache-a85688b0-d68f-4370-bd95-dc9fb1d2c26a" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 816.672064] env[63345]: DEBUG nova.network.neutron [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: df2f06af-54a6-4dbd-83ff-1e4b066acbf3] Updating instance_info_cache with network_info: [{"id": "9682a3c3-bc5f-4d38-998f-d008a47b824e", "address": "fa:16:3e:fa:3c:05", "network": {"id": "c9f406eb-96bc-4c63-8f76-474a8fcc4f7d", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1744519845-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc3b4aff33e540d79c796f98c315a05a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9682a3c3-bc", "ovs_interfaceid": "9682a3c3-bc5f-4d38-998f-d008a47b824e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 816.757802] env[63345]: DEBUG oslo_vmware.api [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017135, 'name': PowerOnVM_Task, 'duration_secs': 1.144439} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.758334] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 11652422-9136-4453-b932-06695f9bc910] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 816.758334] env[63345]: INFO nova.compute.manager [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 11652422-9136-4453-b932-06695f9bc910] Took 11.44 seconds to spawn the instance on the hypervisor. [ 816.758468] env[63345]: DEBUG nova.compute.manager [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 11652422-9136-4453-b932-06695f9bc910] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 816.761032] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af3b6af4-50e8-400a-8113-71fa629b0ac4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.785383] env[63345]: DEBUG oslo_vmware.api [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52964fbd-8fae-8587-3c78-7d4a47ca617c, 'name': SearchDatastore_Task, 'duration_secs': 0.029213} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.786893] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 816.786893] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 9aa651b8-317d-4153-8c33-9df0a5d16115/9aa651b8-317d-4153-8c33-9df0a5d16115.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 816.786893] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 816.786893] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 816.786893] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5835d324-503e-47ce-91b4-22c93aadef7f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.788777] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d601e3ea-4577-4d4a-a24d-5d82fd3d15bf {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.797694] env[63345]: DEBUG oslo_vmware.api [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Waiting for the task: (returnval){ [ 816.797694] env[63345]: value = "task-1017139" [ 816.797694] env[63345]: _type = "Task" [ 816.797694] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.802045] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 816.802045] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 816.803024] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-50c8ddcf-4acc-40f3-9822-a389b8c1549a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.811151] env[63345]: DEBUG oslo_vmware.api [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017139, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.816161] env[63345]: DEBUG oslo_vmware.api [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Waiting for the task: (returnval){ [ 816.816161] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52fd372b-3d0f-33b0-9932-4ff0e02e4d26" [ 816.816161] env[63345]: _type = "Task" [ 816.816161] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.826322] env[63345]: DEBUG oslo_vmware.api [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52fd372b-3d0f-33b0-9932-4ff0e02e4d26, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.902661] env[63345]: DEBUG nova.compute.manager [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] [instance: 37f269fe-0266-4c03-9641-e6f43072657a] Start spawning the instance on the hypervisor. 
{{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 816.926173] env[63345]: DEBUG nova.scheduler.client.report [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 816.948958] env[63345]: DEBUG nova.virt.hardware [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=<?>,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-09-30T09:32:21Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 816.949285] env[63345]: DEBUG nova.virt.hardware [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 816.949425] env[63345]: DEBUG nova.virt.hardware [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 816.949611] env[63345]: DEBUG nova.virt.hardware [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 816.949756] env[63345]: DEBUG nova.virt.hardware [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 816.949967] env[63345]: DEBUG nova.virt.hardware [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 816.950612] env[63345]: DEBUG
nova.virt.hardware [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 816.950725] env[63345]: DEBUG nova.virt.hardware [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 816.950894] env[63345]: DEBUG nova.virt.hardware [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 816.951346] env[63345]: DEBUG nova.virt.hardware [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 816.951560] env[63345]: DEBUG nova.virt.hardware [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 816.952517] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-893e9906-7d2e-48af-8fbf-ca536b942154 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.964284] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10457af1-ee5e-4074-a237-de133a9917a9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.093076] env[63345]: DEBUG oslo_vmware.api [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1017138, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.175864] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Releasing lock "refresh_cache-df2f06af-54a6-4dbd-83ff-1e4b066acbf3" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 817.176760] env[63345]: DEBUG nova.compute.manager [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: df2f06af-54a6-4dbd-83ff-1e4b066acbf3] Instance network_info: |[{"id": "9682a3c3-bc5f-4d38-998f-d008a47b824e", "address": "fa:16:3e:fa:3c:05", "network": {"id": "c9f406eb-96bc-4c63-8f76-474a8fcc4f7d", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1744519845-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc3b4aff33e540d79c796f98c315a05a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9682a3c3-bc", "ovs_interfaceid": "9682a3c3-bc5f-4d38-998f-d008a47b824e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 817.176760] env[63345]: DEBUG oslo_concurrency.lockutils [req-8b382a5f-45a6-4f17-8669-8a352a6e165e req-ae5cb4b0-b7dd-4517-9fde-17d00e687b2e service nova] Acquired lock "refresh_cache-df2f06af-54a6-4dbd-83ff-1e4b066acbf3" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 817.176760] env[63345]: DEBUG nova.network.neutron [req-8b382a5f-45a6-4f17-8669-8a352a6e165e req-ae5cb4b0-b7dd-4517-9fde-17d00e687b2e service nova] [instance: df2f06af-54a6-4dbd-83ff-1e4b066acbf3] Refreshing network info cache for port 9682a3c3-bc5f-4d38-998f-d008a47b824e {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 817.177973] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: df2f06af-54a6-4dbd-83ff-1e4b066acbf3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fa:3c:05', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4fb94adb-cc41-4c16-9830-a3205dbd2bf5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9682a3c3-bc5f-4d38-998f-d008a47b824e', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 817.188253] env[63345]: DEBUG oslo.service.loopingcall [None req-4859bd5a-c52a-4920-aa58-adf503feea6f 
tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 817.191792] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: df2f06af-54a6-4dbd-83ff-1e4b066acbf3] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 817.192469] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b51a325b-25e4-4856-bbe3-1ac65f5a309a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.215929] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 817.215929] env[63345]: value = "task-1017140" [ 817.215929] env[63345]: _type = "Task" [ 817.215929] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.225872] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017140, 'name': CreateVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.284354] env[63345]: INFO nova.compute.manager [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 11652422-9136-4453-b932-06695f9bc910] Took 47.67 seconds to build instance. [ 817.310101] env[63345]: DEBUG oslo_vmware.api [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017139, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.329308] env[63345]: DEBUG oslo_vmware.api [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52fd372b-3d0f-33b0-9932-4ff0e02e4d26, 'name': SearchDatastore_Task, 'duration_secs': 0.049225} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.331196] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7c10e80b-8294-473b-8a56-3293e24617be {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.342475] env[63345]: DEBUG oslo_vmware.api [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Waiting for the task: (returnval){ [ 817.342475] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]5297a7f0-c28d-db5d-c9ec-e56e75cb3cb0" [ 817.342475] env[63345]: _type = "Task" [ 817.342475] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.351634] env[63345]: DEBUG oslo_vmware.api [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5297a7f0-c28d-db5d-c9ec-e56e75cb3cb0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.432036] env[63345]: DEBUG oslo_concurrency.lockutils [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.563s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 817.432789] env[63345]: DEBUG nova.compute.manager [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Start building networks asynchronously for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 817.435886] env[63345]: DEBUG oslo_concurrency.lockutils [None req-27434f8c-bdad-4693-b94d-700a2c631c2f tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.062s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 817.436264] env[63345]: DEBUG oslo_concurrency.lockutils [None req-27434f8c-bdad-4693-b94d-700a2c631c2f tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 817.438707] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ff43f3f2-d1c3-4221-8082-464b2621fcad tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.570s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 817.440055] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ff43f3f2-d1c3-4221-8082-464b2621fcad tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 817.441730] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.262s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 817.443703] env[63345]: INFO nova.compute.claims [None 
req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] [instance: 40d228ea-881e-4442-a16a-6758d061aa39] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 817.469128] env[63345]: DEBUG nova.network.neutron [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] [instance: 37f269fe-0266-4c03-9641-e6f43072657a] Successfully updated port: af759cda-f432-45a8-afdd-ead0d3533779 {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 817.497397] env[63345]: INFO nova.scheduler.client.report [None req-ff43f3f2-d1c3-4221-8082-464b2621fcad tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Deleted allocations for instance 3e4e58bd-903b-4b3d-8be4-5678aab6c721 [ 817.595083] env[63345]: DEBUG oslo_vmware.api [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1017138, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.728830] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017140, 'name': CreateVM_Task} progress is 99%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.729887] env[63345]: DEBUG nova.network.neutron [req-8b382a5f-45a6-4f17-8669-8a352a6e165e req-ae5cb4b0-b7dd-4517-9fde-17d00e687b2e service nova] [instance: df2f06af-54a6-4dbd-83ff-1e4b066acbf3] Updated VIF entry in instance network info cache for port 9682a3c3-bc5f-4d38-998f-d008a47b824e. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 817.730244] env[63345]: DEBUG nova.network.neutron [req-8b382a5f-45a6-4f17-8669-8a352a6e165e req-ae5cb4b0-b7dd-4517-9fde-17d00e687b2e service nova] [instance: df2f06af-54a6-4dbd-83ff-1e4b066acbf3] Updating instance_info_cache with network_info: [{"id": "9682a3c3-bc5f-4d38-998f-d008a47b824e", "address": "fa:16:3e:fa:3c:05", "network": {"id": "c9f406eb-96bc-4c63-8f76-474a8fcc4f7d", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1744519845-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc3b4aff33e540d79c796f98c315a05a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9682a3c3-bc", "ovs_interfaceid": "9682a3c3-bc5f-4d38-998f-d008a47b824e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 817.785578] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7f7b2128-5bed-405f-ab77-c736993db645 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Lock "11652422-9136-4453-b932-06695f9bc910" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 49.613s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 817.811706] env[63345]: DEBUG oslo_vmware.api [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017139, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.877064} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.812011] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 9aa651b8-317d-4153-8c33-9df0a5d16115/9aa651b8-317d-4153-8c33-9df0a5d16115.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 817.812243] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 817.812512] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-186acd96-dd3a-4bd1-ae32-14c18f4644ca {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.820032] env[63345]: DEBUG oslo_vmware.api [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Waiting for the task: (returnval){ [ 817.820032] env[63345]: value = "task-1017141" [ 817.820032] env[63345]: _type = "Task" [ 817.820032] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.828491] env[63345]: DEBUG oslo_vmware.api [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017141, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.859184] env[63345]: DEBUG oslo_vmware.api [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5297a7f0-c28d-db5d-c9ec-e56e75cb3cb0, 'name': SearchDatastore_Task, 'duration_secs': 0.059877} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.859184] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 817.859184] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] a85688b0-d68f-4370-bd95-dc9fb1d2c26a/a85688b0-d68f-4370-bd95-dc9fb1d2c26a.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 817.859184] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ef37e0e5-08b9-4aaa-8cf7-68be37758868 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.865754] env[63345]: DEBUG oslo_vmware.api [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Waiting for the task: (returnval){ [ 817.865754] env[63345]: value = "task-1017142" [ 817.865754] env[63345]: _type = "Task" [ 817.865754] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.874833] env[63345]: DEBUG oslo_vmware.api [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017142, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.953565] env[63345]: DEBUG nova.compute.utils [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 817.954950] env[63345]: DEBUG nova.compute.manager [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Allocating IP information in the background. 
{{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 817.955184] env[63345]: DEBUG nova.network.neutron [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 817.974536] env[63345]: DEBUG oslo_concurrency.lockutils [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Acquiring lock "refresh_cache-37f269fe-0266-4c03-9641-e6f43072657a" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 817.974872] env[63345]: DEBUG oslo_concurrency.lockutils [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Acquired lock "refresh_cache-37f269fe-0266-4c03-9641-e6f43072657a" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 817.975109] env[63345]: DEBUG nova.network.neutron [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] [instance: 37f269fe-0266-4c03-9641-e6f43072657a] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 817.984958] env[63345]: DEBUG oslo_concurrency.lockutils [None req-27434f8c-bdad-4693-b94d-700a2c631c2f tempest-ServerActionsV293TestJSON-1003580665 tempest-ServerActionsV293TestJSON-1003580665-project-member] Lock "3b0d115d-dad5-4881-a0e0-b98f555da533" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 33.181s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 818.007288] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ff43f3f2-d1c3-4221-8082-464b2621fcad tempest-ListServerFiltersTestJSON-10594905 tempest-ListServerFiltersTestJSON-10594905-project-member] Lock "3e4e58bd-903b-4b3d-8be4-5678aab6c721" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 30.795s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 818.013254] env[63345]: DEBUG nova.policy [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dd618fef89a843209784ca9e925d18eb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cb91ecf5d00e48dea9baf2122ac4fed7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 818.081265] env[63345]: DEBUG nova.network.neutron [-] [instance: f043239f-7158-4199-a784-d711a5a301be] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 818.096237] env[63345]: DEBUG oslo_vmware.api 
[None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1017138, 'name': ReconfigVM_Task, 'duration_secs': 1.026708} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.096761] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 4f108dcc-c130-4c3f-840d-7a912150db3f] Reconfigured VM instance instance-00000040 to attach disk [datastore2] 4f108dcc-c130-4c3f-840d-7a912150db3f/4f108dcc-c130-4c3f-840d-7a912150db3f.vmdk or device None with type streamOptimized {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 818.097508] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-917eccb2-cf79-4da8-9038-b50fa7d9cc2d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.105422] env[63345]: DEBUG oslo_vmware.api [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Waiting for the task: (returnval){ [ 818.105422] env[63345]: value = "task-1017143" [ 818.105422] env[63345]: _type = "Task" [ 818.105422] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.114575] env[63345]: DEBUG oslo_vmware.api [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1017143, 'name': Rename_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.231497] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017140, 'name': CreateVM_Task} progress is 99%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.232587] env[63345]: DEBUG oslo_concurrency.lockutils [req-8b382a5f-45a6-4f17-8669-8a352a6e165e req-ae5cb4b0-b7dd-4517-9fde-17d00e687b2e service nova] Releasing lock "refresh_cache-df2f06af-54a6-4dbd-83ff-1e4b066acbf3" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 818.289628] env[63345]: DEBUG nova.compute.manager [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] [instance: a3f34e0e-2969-406f-a086-a925549e458e] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 818.332932] env[63345]: DEBUG oslo_vmware.api [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017141, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.130643} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.333312] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 818.334576] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cf89b2a-277b-4e2b-95a4-db39a822f208 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.358906] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Reconfiguring VM instance instance-00000042 to attach disk [datastore2] 9aa651b8-317d-4153-8c33-9df0a5d16115/9aa651b8-317d-4153-8c33-9df0a5d16115.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 818.359936] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e146af4d-efb7-4cf2-8d84-8ca54529fe6e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.396196] env[63345]: DEBUG oslo_vmware.api [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017142, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.398681] env[63345]: DEBUG oslo_vmware.api [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Waiting for the task: (returnval){ [ 818.398681] env[63345]: value = "task-1017144" [ 818.398681] env[63345]: _type = "Task" [ 818.398681] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.410271] env[63345]: DEBUG oslo_vmware.api [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017144, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.461032] env[63345]: DEBUG nova.compute.manager [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Start building block device mappings for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 818.533857] env[63345]: DEBUG nova.network.neutron [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] [instance: 37f269fe-0266-4c03-9641-e6f43072657a] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 818.591499] env[63345]: INFO nova.compute.manager [-] [instance: f043239f-7158-4199-a784-d711a5a301be] Took 2.01 seconds to deallocate network for instance. [ 818.617580] env[63345]: DEBUG oslo_vmware.api [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1017143, 'name': Rename_Task, 'duration_secs': 0.3391} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.620755] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 4f108dcc-c130-4c3f-840d-7a912150db3f] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 818.621300] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fdf326f2-387d-45fb-9411-7d8d8d2c0a50 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.629391] env[63345]: DEBUG oslo_vmware.api [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Waiting for the task: (returnval){ [ 818.629391] env[63345]: value = "task-1017145" [ 818.629391] env[63345]: _type = "Task" [ 818.629391] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.643420] env[63345]: DEBUG oslo_vmware.api [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1017145, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.719223] env[63345]: DEBUG nova.network.neutron [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] [instance: 37f269fe-0266-4c03-9641-e6f43072657a] Updating instance_info_cache with network_info: [{"id": "af759cda-f432-45a8-afdd-ead0d3533779", "address": "fa:16:3e:f8:59:03", "network": {"id": "ea58a9a6-c986-4397-bc97-8583e1d33355", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1287890533-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5f090462fd22418691d9284bfa52c142", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7806fe18-2b89-4386-87b1-f22876f82af2", "external-id": "nsx-vlan-transportzone-727", "segmentation_id": 727, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf759cda-f4", "ovs_interfaceid": "af759cda-f432-45a8-afdd-ead0d3533779", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 818.735022] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017140, 'name': CreateVM_Task, 'duration_secs': 1.500143} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.735979] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: df2f06af-54a6-4dbd-83ff-1e4b066acbf3] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 818.737319] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 818.737562] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 818.737985] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 818.738604] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d5416f1c-4bcd-493f-8eb4-8da36f8de95f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.745211] env[63345]: DEBUG oslo_vmware.api [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Waiting for the task: (returnval){ [ 818.745211] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52e04a8c-fb32-7a29-279b-190d83e643d6" [ 818.745211] env[63345]: _type = "Task" [ 818.745211] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.757744] env[63345]: DEBUG oslo_vmware.api [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52e04a8c-fb32-7a29-279b-190d83e643d6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.775852] env[63345]: DEBUG nova.network.neutron [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Successfully created port: 07017fee-f295-4317-9453-e41726d715c5 {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 818.812885] env[63345]: DEBUG oslo_concurrency.lockutils [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 818.899136] env[63345]: DEBUG oslo_vmware.api [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017142, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.758398} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.899136] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] a85688b0-d68f-4370-bd95-dc9fb1d2c26a/a85688b0-d68f-4370-bd95-dc9fb1d2c26a.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 818.899136] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: a85688b0-d68f-4370-bd95-dc9fb1d2c26a] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 818.899136] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9978b862-87ed-4680-8ecf-5b452e3ab983 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.909372] env[63345]: DEBUG oslo_vmware.api [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017144, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.914027] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dfec124-9c4c-42e1-af47-8dbac90bc0a5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.915282] env[63345]: DEBUG oslo_vmware.api [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Waiting for the task: (returnval){ [ 818.915282] env[63345]: value = "task-1017146" [ 818.915282] env[63345]: _type = "Task" [ 818.915282] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.923120] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-804fb303-4a00-43b8-909a-86a0df65d023 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.928822] env[63345]: DEBUG oslo_vmware.api [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017146, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.962949] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8b9e759-53d2-44d4-a2cb-918bade46f05 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.977726] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ae613e6-122b-4fe5-8f27-c5d96f05dbd1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.993746] env[63345]: DEBUG nova.compute.provider_tree [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 818.997621] env[63345]: DEBUG nova.compute.manager [req-089905ae-6061-4c0c-ad07-14dadfafc752 req-48a1c9dd-010a-4706-9e9c-0cef8c570e5a service nova] [instance: 37f269fe-0266-4c03-9641-e6f43072657a] Received event network-vif-plugged-af759cda-f432-45a8-afdd-ead0d3533779 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 818.997899] env[63345]: DEBUG oslo_concurrency.lockutils [req-089905ae-6061-4c0c-ad07-14dadfafc752 req-48a1c9dd-010a-4706-9e9c-0cef8c570e5a service nova] Acquiring lock "37f269fe-0266-4c03-9641-e6f43072657a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 818.998071] env[63345]: DEBUG oslo_concurrency.lockutils [req-089905ae-6061-4c0c-ad07-14dadfafc752 req-48a1c9dd-010a-4706-9e9c-0cef8c570e5a service nova] Lock "37f269fe-0266-4c03-9641-e6f43072657a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 818.998248] env[63345]: DEBUG oslo_concurrency.lockutils [req-089905ae-6061-4c0c-ad07-14dadfafc752 req-48a1c9dd-010a-4706-9e9c-0cef8c570e5a service nova] Lock "37f269fe-0266-4c03-9641-e6f43072657a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 818.998423] env[63345]: DEBUG nova.compute.manager [req-089905ae-6061-4c0c-ad07-14dadfafc752 req-48a1c9dd-010a-4706-9e9c-0cef8c570e5a service nova] [instance: 37f269fe-0266-4c03-9641-e6f43072657a] No waiting events found dispatching network-vif-plugged-af759cda-f432-45a8-afdd-ead0d3533779 {{(pid=63345) 
pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 818.998597] env[63345]: WARNING nova.compute.manager [req-089905ae-6061-4c0c-ad07-14dadfafc752 req-48a1c9dd-010a-4706-9e9c-0cef8c570e5a service nova] [instance: 37f269fe-0266-4c03-9641-e6f43072657a] Received unexpected event network-vif-plugged-af759cda-f432-45a8-afdd-ead0d3533779 for instance with vm_state building and task_state spawning. [ 818.998770] env[63345]: DEBUG nova.compute.manager [req-089905ae-6061-4c0c-ad07-14dadfafc752 req-48a1c9dd-010a-4706-9e9c-0cef8c570e5a service nova] [instance: 37f269fe-0266-4c03-9641-e6f43072657a] Received event network-changed-af759cda-f432-45a8-afdd-ead0d3533779 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 818.998986] env[63345]: DEBUG nova.compute.manager [req-089905ae-6061-4c0c-ad07-14dadfafc752 req-48a1c9dd-010a-4706-9e9c-0cef8c570e5a service nova] [instance: 37f269fe-0266-4c03-9641-e6f43072657a] Refreshing instance network info cache due to event network-changed-af759cda-f432-45a8-afdd-ead0d3533779. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 818.999272] env[63345]: DEBUG oslo_concurrency.lockutils [req-089905ae-6061-4c0c-ad07-14dadfafc752 req-48a1c9dd-010a-4706-9e9c-0cef8c570e5a service nova] Acquiring lock "refresh_cache-37f269fe-0266-4c03-9641-e6f43072657a" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 819.103453] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a0001168-4e4f-4106-8211-41222142d8e8 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 819.141249] env[63345]: DEBUG oslo_vmware.api [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1017145, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.226295] env[63345]: DEBUG oslo_concurrency.lockutils [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Releasing lock "refresh_cache-37f269fe-0266-4c03-9641-e6f43072657a" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 819.226586] env[63345]: DEBUG nova.compute.manager [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] [instance: 37f269fe-0266-4c03-9641-e6f43072657a] Instance network_info: |[{"id": "af759cda-f432-45a8-afdd-ead0d3533779", "address": "fa:16:3e:f8:59:03", "network": {"id": "ea58a9a6-c986-4397-bc97-8583e1d33355", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1287890533-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5f090462fd22418691d9284bfa52c142", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7806fe18-2b89-4386-87b1-f22876f82af2", "external-id": "nsx-vlan-transportzone-727", "segmentation_id": 727, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf759cda-f4", "ovs_interfaceid": "af759cda-f432-45a8-afdd-ead0d3533779", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 819.226882] env[63345]: DEBUG oslo_concurrency.lockutils [req-089905ae-6061-4c0c-ad07-14dadfafc752 req-48a1c9dd-010a-4706-9e9c-0cef8c570e5a service nova] Acquired lock "refresh_cache-37f269fe-0266-4c03-9641-e6f43072657a" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 819.227104] env[63345]: DEBUG nova.network.neutron [req-089905ae-6061-4c0c-ad07-14dadfafc752 req-48a1c9dd-010a-4706-9e9c-0cef8c570e5a service nova] [instance: 37f269fe-0266-4c03-9641-e6f43072657a] Refreshing network info cache for port af759cda-f432-45a8-afdd-ead0d3533779 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 819.228436] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] [instance: 37f269fe-0266-4c03-9641-e6f43072657a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f8:59:03', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7806fe18-2b89-4386-87b1-f22876f82af2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'af759cda-f432-45a8-afdd-ead0d3533779', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 819.236378] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 
tempest-ServerPasswordTestJSON-2056804140-project-member] Creating folder: Project (5f090462fd22418691d9284bfa52c142). Parent ref: group-v225918. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 819.237695] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-43aa0dbb-dd18-4b9f-b4f1-5482756b0f3a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.251984] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Created folder: Project (5f090462fd22418691d9284bfa52c142) in parent group-v225918. [ 819.252268] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Creating folder: Instances. Parent ref: group-v226050. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 819.256503] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-20facd98-ecba-44c6-b917-502d48f20c09 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.267025] env[63345]: DEBUG oslo_vmware.api [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52e04a8c-fb32-7a29-279b-190d83e643d6, 'name': SearchDatastore_Task, 'duration_secs': 0.020638} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.267025] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 819.267025] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: df2f06af-54a6-4dbd-83ff-1e4b066acbf3] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 819.267025] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 819.267025] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 819.267025] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 819.267402] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-996bdbb3-1228-478d-bae1-5da393b034ee {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.270587] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Created folder: Instances in parent group-v226050. [ 819.270835] env[63345]: DEBUG oslo.service.loopingcall [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 819.271492] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 37f269fe-0266-4c03-9641-e6f43072657a] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 819.271731] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c042db95-e628-48fd-8db2-7660e97dd975 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.287937] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 819.288168] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 819.289396] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7716e97e-31d2-48c8-8588-5abd3ee1202a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.294936] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 819.294936] env[63345]: value = "task-1017149" [ 819.294936] env[63345]: _type = "Task" [ 819.294936] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.296392] env[63345]: DEBUG oslo_vmware.api [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Waiting for the task: (returnval){ [ 819.296392] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]5256a2e4-da7d-7aaf-195d-54b29f14c2ab" [ 819.296392] env[63345]: _type = "Task" [ 819.296392] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.311103] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017149, 'name': CreateVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.311424] env[63345]: DEBUG oslo_vmware.api [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5256a2e4-da7d-7aaf-195d-54b29f14c2ab, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.410774] env[63345]: DEBUG oslo_vmware.api [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017144, 'name': ReconfigVM_Task, 'duration_secs': 0.602101} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.411150] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Reconfigured VM instance instance-00000042 to attach disk [datastore2] 9aa651b8-317d-4153-8c33-9df0a5d16115/9aa651b8-317d-4153-8c33-9df0a5d16115.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 819.411799] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-548d6272-5010-4bf3-a49d-46343ef05f31 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.420030] env[63345]: DEBUG oslo_vmware.api [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Waiting for the task: (returnval){ [ 819.420030] env[63345]: value = "task-1017150" [ 819.420030] env[63345]: _type = "Task" [ 819.420030] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.426588] env[63345]: DEBUG oslo_vmware.api [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017146, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.105693} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.426855] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: a85688b0-d68f-4370-bd95-dc9fb1d2c26a] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 819.427631] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-792f61bd-63c8-4653-9fa4-b8ff69c79093 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.435134] env[63345]: DEBUG oslo_vmware.api [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017150, 'name': Rename_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.466336] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: a85688b0-d68f-4370-bd95-dc9fb1d2c26a] Reconfiguring VM instance instance-00000043 to attach disk [datastore2] a85688b0-d68f-4370-bd95-dc9fb1d2c26a/a85688b0-d68f-4370-bd95-dc9fb1d2c26a.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 819.466336] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d2e1d270-9df1-4782-820e-ab87b5e12135 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.487564] env[63345]: DEBUG nova.compute.manager [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Start spawning the instance on the hypervisor. {{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 819.496485] env[63345]: DEBUG oslo_vmware.api [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Waiting for the task: (returnval){ [ 819.496485] env[63345]: value = "task-1017151" [ 819.496485] env[63345]: _type = "Task" [ 819.496485] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.501692] env[63345]: DEBUG nova.scheduler.client.report [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 819.511354] env[63345]: DEBUG oslo_vmware.api [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017151, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.528295] env[63345]: DEBUG nova.virt.hardware [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 819.528560] env[63345]: DEBUG nova.virt.hardware [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 819.528726] env[63345]: DEBUG nova.virt.hardware [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 819.528919] env[63345]: DEBUG nova.virt.hardware [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 819.529675] env[63345]: DEBUG nova.virt.hardware [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 819.529675] env[63345]: DEBUG 
nova.virt.hardware [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 819.529675] env[63345]: DEBUG nova.virt.hardware [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 819.529916] env[63345]: DEBUG nova.virt.hardware [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 819.529951] env[63345]: DEBUG nova.virt.hardware [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 819.530133] env[63345]: DEBUG nova.virt.hardware [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 819.530321] env[63345]: DEBUG nova.virt.hardware [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 819.532441] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f246455c-1c73-42e2-88fd-9f6e375b1f15 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.542759] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f90b19cd-fb89-4b7e-aec4-07296a6ab8f8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.640857] env[63345]: DEBUG oslo_vmware.api [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1017145, 'name': PowerOnVM_Task, 'duration_secs': 0.695744} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.641227] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 4f108dcc-c130-4c3f-840d-7a912150db3f] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 819.641469] env[63345]: INFO nova.compute.manager [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 4f108dcc-c130-4c3f-840d-7a912150db3f] Took 16.93 seconds to spawn the instance on the hypervisor. [ 819.641689] env[63345]: DEBUG nova.compute.manager [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 4f108dcc-c130-4c3f-840d-7a912150db3f] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 819.642545] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b59fbb05-bde8-4bec-8f78-d7b336411752 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.811155] env[63345]: DEBUG oslo_vmware.api [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5256a2e4-da7d-7aaf-195d-54b29f14c2ab, 'name': SearchDatastore_Task, 'duration_secs': 0.013355} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.815080] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017149, 'name': CreateVM_Task, 'duration_secs': 0.421485} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.815304] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c9c5608-672a-4df3-bbb9-ad8178b8da1f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.817602] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 37f269fe-0266-4c03-9641-e6f43072657a] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 819.818310] env[63345]: DEBUG oslo_concurrency.lockutils [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 819.818500] env[63345]: DEBUG oslo_concurrency.lockutils [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 819.818827] env[63345]: DEBUG oslo_concurrency.lockutils [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 819.819447] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c30a0894-cec1-4ee7-9bc3-6e6599a5f937 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.823428] env[63345]: DEBUG oslo_vmware.api [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Waiting for the task: (returnval){ [ 819.823428] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52043b15-f2c6-9d3d-cd52-508dd4fb6112" [ 819.823428] env[63345]: _type = "Task" [ 819.823428] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.824229] env[63345]: DEBUG oslo_vmware.api [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Waiting for the task: (returnval){ [ 819.824229] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52f311c7-7ea7-d047-b277-9dfdd39b54e0" [ 819.824229] env[63345]: _type = "Task" [ 819.824229] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.836989] env[63345]: DEBUG oslo_vmware.api [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52043b15-f2c6-9d3d-cd52-508dd4fb6112, 'name': SearchDatastore_Task} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.840946] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 819.841367] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] df2f06af-54a6-4dbd-83ff-1e4b066acbf3/df2f06af-54a6-4dbd-83ff-1e4b066acbf3.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 819.842081] env[63345]: DEBUG oslo_vmware.api [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52f311c7-7ea7-d047-b277-9dfdd39b54e0, 'name': SearchDatastore_Task, 'duration_secs': 0.01118} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.842302] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-829cd882-b748-4016-8d76-f7269af8f015 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.845056] env[63345]: DEBUG oslo_concurrency.lockutils [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 819.845301] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] [instance: 37f269fe-0266-4c03-9641-e6f43072657a] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 819.845537] env[63345]: DEBUG oslo_concurrency.lockutils [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 819.845687] env[63345]: DEBUG oslo_concurrency.lockutils [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 
819.845871] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 819.846146] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f6220206-1ff9-48b1-bed0-67deec03e9fd {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.856704] env[63345]: DEBUG oslo_vmware.api [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Waiting for the task: (returnval){ [ 819.856704] env[63345]: value = "task-1017152" [ 819.856704] env[63345]: _type = "Task" [ 819.856704] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.857062] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 819.857283] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 819.860817] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-555c5aa3-1127-4435-a6c3-e1baf1468285 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.866928] env[63345]: DEBUG oslo_vmware.api [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Waiting for the task: (returnval){ [ 819.866928] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52c4d83c-5bc5-70d6-e7f1-461e529ea09a" [ 819.866928] env[63345]: _type = "Task" [ 819.866928] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.872622] env[63345]: DEBUG oslo_vmware.api [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': task-1017152, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.886174] env[63345]: DEBUG oslo_vmware.api [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52c4d83c-5bc5-70d6-e7f1-461e529ea09a, 'name': SearchDatastore_Task, 'duration_secs': 0.012101} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.887175] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-64e305f9-3228-48d3-ab48-03935c53852a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.894932] env[63345]: DEBUG oslo_vmware.api [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Waiting for the task: (returnval){ [ 819.894932] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]526e0153-2a62-a264-c864-6f4492dbbade" [ 819.894932] env[63345]: _type = "Task" [ 819.894932] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.906395] env[63345]: DEBUG oslo_vmware.api [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]526e0153-2a62-a264-c864-6f4492dbbade, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.931409] env[63345]: DEBUG oslo_vmware.api [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017150, 'name': Rename_Task, 'duration_secs': 0.253007} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.931409] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 819.931705] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-16a5521b-79fa-4f10-87ff-03933ea129fd {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.939286] env[63345]: DEBUG oslo_vmware.api [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Waiting for the task: (returnval){ [ 819.939286] env[63345]: value = "task-1017153" [ 819.939286] env[63345]: _type = "Task" [ 819.939286] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.950636] env[63345]: DEBUG oslo_vmware.api [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017153, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.007411] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.566s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 820.008103] env[63345]: DEBUG nova.compute.manager [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] [instance: 40d228ea-881e-4442-a16a-6758d061aa39] Start building networks asynchronously for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 820.014062] env[63345]: DEBUG oslo_vmware.api [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017151, 'name': ReconfigVM_Task, 'duration_secs': 0.397174} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.014062] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1125b816-eb22-46d7-bce9-e2884c5691f2 tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.487s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 820.014062] env[63345]: DEBUG nova.objects.instance [None req-1125b816-eb22-46d7-bce9-e2884c5691f2 tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Lazy-loading 'resources' on Instance uuid a9b69d13-6330-4f9b-b8e1-1c0017655f9f {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 820.014062] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: a85688b0-d68f-4370-bd95-dc9fb1d2c26a] Reconfigured VM instance instance-00000043 to attach disk [datastore2] a85688b0-d68f-4370-bd95-dc9fb1d2c26a/a85688b0-d68f-4370-bd95-dc9fb1d2c26a.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 820.014062] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-96fbaf62-f2d8-4d8b-914c-5b11b22b4244 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.023134] env[63345]: DEBUG oslo_vmware.api [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Waiting for the task: (returnval){ [ 820.023134] env[63345]: value = "task-1017154" [ 820.023134] env[63345]: _type = "Task" [ 820.023134] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.032625] env[63345]: DEBUG oslo_vmware.api [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017154, 'name': Rename_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.145090] env[63345]: DEBUG nova.network.neutron [req-089905ae-6061-4c0c-ad07-14dadfafc752 req-48a1c9dd-010a-4706-9e9c-0cef8c570e5a service nova] [instance: 37f269fe-0266-4c03-9641-e6f43072657a] Updated VIF entry in instance network info cache for port af759cda-f432-45a8-afdd-ead0d3533779. {{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 820.145090] env[63345]: DEBUG nova.network.neutron [req-089905ae-6061-4c0c-ad07-14dadfafc752 req-48a1c9dd-010a-4706-9e9c-0cef8c570e5a service nova] [instance: 37f269fe-0266-4c03-9641-e6f43072657a] Updating instance_info_cache with network_info: [{"id": "af759cda-f432-45a8-afdd-ead0d3533779", "address": "fa:16:3e:f8:59:03", "network": {"id": "ea58a9a6-c986-4397-bc97-8583e1d33355", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1287890533-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5f090462fd22418691d9284bfa52c142", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7806fe18-2b89-4386-87b1-f22876f82af2", "external-id": "nsx-vlan-transportzone-727", "segmentation_id": 727, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf759cda-f4", "ovs_interfaceid": "af759cda-f432-45a8-afdd-ead0d3533779", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 820.163490] env[63345]: INFO nova.compute.manager [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 4f108dcc-c130-4c3f-840d-7a912150db3f] Took 52.68 seconds to build instance. [ 820.376565] env[63345]: DEBUG oslo_vmware.api [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': task-1017152, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.412671] env[63345]: DEBUG oslo_vmware.api [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]526e0153-2a62-a264-c864-6f4492dbbade, 'name': SearchDatastore_Task, 'duration_secs': 0.010839} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.412671] env[63345]: DEBUG oslo_concurrency.lockutils [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 820.412671] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 37f269fe-0266-4c03-9641-e6f43072657a/37f269fe-0266-4c03-9641-e6f43072657a.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 820.412671] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f99e8813-b447-49b6-9597-af7b95cef670 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.422663] env[63345]: DEBUG oslo_vmware.api [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Waiting for the task: (returnval){ [ 820.422663] env[63345]: value = "task-1017155" [ 820.422663] env[63345]: _type = "Task" [ 820.422663] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.434737] env[63345]: DEBUG oslo_vmware.api [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Task: {'id': task-1017155, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.455291] env[63345]: DEBUG oslo_vmware.api [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017153, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.513904] env[63345]: DEBUG nova.compute.utils [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 820.516519] env[63345]: DEBUG nova.compute.manager [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] [instance: 40d228ea-881e-4442-a16a-6758d061aa39] Allocating IP information in the background. 
{{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 820.516851] env[63345]: DEBUG nova.network.neutron [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] [instance: 40d228ea-881e-4442-a16a-6758d061aa39] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 820.548293] env[63345]: DEBUG oslo_vmware.api [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017154, 'name': Rename_Task, 'duration_secs': 0.19985} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.548293] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: a85688b0-d68f-4370-bd95-dc9fb1d2c26a] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 820.548293] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b0501620-f181-46ae-a373-72cf18caf142 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.567022] env[63345]: DEBUG oslo_vmware.api [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Waiting for the task: (returnval){ [ 820.567022] env[63345]: value = "task-1017156" [ 820.567022] env[63345]: _type = "Task" [ 820.567022] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.584841] env[63345]: DEBUG oslo_vmware.api [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017156, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.613382] env[63345]: DEBUG nova.policy [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e98db0d334fa432697f093346f061acc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ee96c4ba2a4e4a1fb9b903bed97d2500', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 820.647474] env[63345]: DEBUG oslo_concurrency.lockutils [req-089905ae-6061-4c0c-ad07-14dadfafc752 req-48a1c9dd-010a-4706-9e9c-0cef8c570e5a service nova] Releasing lock "refresh_cache-37f269fe-0266-4c03-9641-e6f43072657a" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 820.647804] env[63345]: DEBUG nova.compute.manager [req-089905ae-6061-4c0c-ad07-14dadfafc752 req-48a1c9dd-010a-4706-9e9c-0cef8c570e5a service nova] [instance: f043239f-7158-4199-a784-d711a5a301be] Received event network-vif-deleted-a8153077-1984-4619-ae74-08c5902cfff8 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 820.671280] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5ae4f11e-84f1-4966-8260-3f3150988ffa tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Lock "4f108dcc-c130-4c3f-840d-7a912150db3f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 54.197s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 820.875733] env[63345]: DEBUG oslo_vmware.api [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': task-1017152, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.539187} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.876059] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] df2f06af-54a6-4dbd-83ff-1e4b066acbf3/df2f06af-54a6-4dbd-83ff-1e4b066acbf3.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 820.876294] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: df2f06af-54a6-4dbd-83ff-1e4b066acbf3] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 820.876571] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0238b5cf-2eb9-4c0d-8d6e-0fff626787d6 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.888981] env[63345]: DEBUG oslo_vmware.api [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Waiting for the task: (returnval){ [ 820.888981] env[63345]: value = "task-1017157" [ 820.888981] env[63345]: _type = "Task" [ 820.888981] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.902146] env[63345]: DEBUG oslo_vmware.api [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': task-1017157, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.942562] env[63345]: DEBUG oslo_vmware.api [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Task: {'id': task-1017155, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.957118] env[63345]: DEBUG oslo_vmware.api [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017153, 'name': PowerOnVM_Task, 'duration_secs': 0.587014} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.957499] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 820.957743] env[63345]: INFO nova.compute.manager [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Took 11.85 seconds to spawn the instance on the hypervisor. [ 820.957929] env[63345]: DEBUG nova.compute.manager [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 820.958818] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79a88bdb-bf93-4901-b577-34f389271beb {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.021792] env[63345]: DEBUG nova.compute.manager [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] [instance: 40d228ea-881e-4442-a16a-6758d061aa39] Start building block device mappings for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 821.031124] env[63345]: DEBUG nova.network.neutron [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] [instance: 40d228ea-881e-4442-a16a-6758d061aa39] Successfully created port: d58bb0b4-4fd1-4361-b242-9e186e278f08 {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 821.064149] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae3c7b88-23ef-45c9-ac10-1987c4390b4d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.084960] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6666c94a-fc9c-4d4b-9ce7-3f421ad591a7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.088124] env[63345]: DEBUG oslo_vmware.api [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017156, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.885779] env[63345]: DEBUG nova.compute.manager [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 1e349d03-6cae-4322-9941-d48c52c21c0e] Starting instance... 
{{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 821.901869] env[63345]: DEBUG nova.network.neutron [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Successfully updated port: 07017fee-f295-4317-9453-e41726d715c5 {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 821.914656] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68e9472c-8724-447f-8978-cd30a0e329da {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.920342] env[63345]: DEBUG oslo_concurrency.lockutils [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquiring lock "refresh_cache-691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 821.920342] env[63345]: DEBUG oslo_concurrency.lockutils [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquired lock "refresh_cache-691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 821.920342] env[63345]: DEBUG nova.network.neutron [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 821.920548] env[63345]: DEBUG nova.compute.manager [None req-5077c400-6ef8-4b26-912d-964466ba2c45 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 11652422-9136-4453-b932-06695f9bc910] Stashing vm_state: active {{(pid=63345) _prep_resize /opt/stack/nova/nova/compute/manager.py:5953}} [ 821.923408] env[63345]: INFO nova.compute.manager [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Took 45.82 seconds to build instance. 
[ 821.926089] env[63345]: DEBUG nova.compute.manager [req-2c352305-cc93-4fdf-ac48-2f081df8d2b3 req-11526b0e-6327-4e77-b98b-a60b9c23e5d4 service nova] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Received event network-vif-plugged-07017fee-f295-4317-9453-e41726d715c5 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 821.926315] env[63345]: DEBUG oslo_concurrency.lockutils [req-2c352305-cc93-4fdf-ac48-2f081df8d2b3 req-11526b0e-6327-4e77-b98b-a60b9c23e5d4 service nova] Acquiring lock "691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 821.930015] env[63345]: DEBUG oslo_concurrency.lockutils [req-2c352305-cc93-4fdf-ac48-2f081df8d2b3 req-11526b0e-6327-4e77-b98b-a60b9c23e5d4 service nova] Lock "691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 821.930015] env[63345]: DEBUG oslo_concurrency.lockutils [req-2c352305-cc93-4fdf-ac48-2f081df8d2b3 req-11526b0e-6327-4e77-b98b-a60b9c23e5d4 service nova] Lock "691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 821.930015] env[63345]: DEBUG nova.compute.manager [req-2c352305-cc93-4fdf-ac48-2f081df8d2b3 req-11526b0e-6327-4e77-b98b-a60b9c23e5d4 service nova] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] No waiting events found dispatching network-vif-plugged-07017fee-f295-4317-9453-e41726d715c5 {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 821.930015] env[63345]: WARNING nova.compute.manager [req-2c352305-cc93-4fdf-ac48-2f081df8d2b3 req-11526b0e-6327-4e77-b98b-a60b9c23e5d4 service nova] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Received unexpected event network-vif-plugged-07017fee-f295-4317-9453-e41726d715c5 for instance with vm_state building and task_state spawning. [ 821.934837] env[63345]: DEBUG oslo_vmware.api [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': task-1017157, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.123189} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.944601] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: df2f06af-54a6-4dbd-83ff-1e4b066acbf3] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 821.944812] env[63345]: DEBUG oslo_vmware.api [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Task: {'id': task-1017155, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.552301} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.945595] env[63345]: DEBUG oslo_vmware.api [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017156, 'name': PowerOnVM_Task, 'duration_secs': 1.237497} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.946017] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbfe376e-6b2f-478a-b96e-ec711cce72da {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.948950] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 37f269fe-0266-4c03-9641-e6f43072657a/37f269fe-0266-4c03-9641-e6f43072657a.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 821.949199] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] [instance: 37f269fe-0266-4c03-9641-e6f43072657a] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 821.950459] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abb783f9-de92-467a-b9ac-5783a1e685e2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.954728] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: a85688b0-d68f-4370-bd95-dc9fb1d2c26a] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 821.954944] env[63345]: INFO nova.compute.manager [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: a85688b0-d68f-4370-bd95-dc9fb1d2c26a] Took 10.24 seconds to spawn the instance on the hypervisor. 
[ 821.955148] env[63345]: DEBUG nova.compute.manager [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: a85688b0-d68f-4370-bd95-dc9fb1d2c26a] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 821.955702] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f2e7eb69-a6c9-42db-b1af-34751fda8ca8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.958908] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34303851-715e-4851-a730-455be49bb2b9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.974868] env[63345]: DEBUG nova.compute.provider_tree [None req-1125b816-eb22-46d7-bce9-e2884c5691f2 tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 821.992565] env[63345]: DEBUG oslo_vmware.api [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Waiting for the task: (returnval){ [ 821.992565] env[63345]: value = "task-1017158" [ 821.992565] env[63345]: _type = "Task" [ 821.992565] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.000533] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: df2f06af-54a6-4dbd-83ff-1e4b066acbf3] Reconfiguring VM instance instance-00000044 to attach disk [datastore2] df2f06af-54a6-4dbd-83ff-1e4b066acbf3/df2f06af-54a6-4dbd-83ff-1e4b066acbf3.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 822.002907] env[63345]: DEBUG nova.scheduler.client.report [None req-1125b816-eb22-46d7-bce9-e2884c5691f2 tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 822.006075] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8d23ec31-90a9-43c8-b2ab-a10f99ae0e80 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.034790] env[63345]: DEBUG oslo_vmware.api [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Task: {'id': task-1017158, 'name': 
ExtendVirtualDisk_Task} progress is 50%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.036534] env[63345]: DEBUG oslo_vmware.api [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Waiting for the task: (returnval){ [ 822.036534] env[63345]: value = "task-1017159" [ 822.036534] env[63345]: _type = "Task" [ 822.036534] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.045808] env[63345]: DEBUG oslo_vmware.api [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': task-1017159, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.420241] env[63345]: DEBUG nova.compute.manager [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] [instance: 40d228ea-881e-4442-a16a-6758d061aa39] Start spawning the instance on the hypervisor. {{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 822.423887] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 822.426421] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6b756e41-bd5f-4ed4-85a5-3e14888ede05 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Acquiring lock "4f108dcc-c130-4c3f-840d-7a912150db3f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 822.426654] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6b756e41-bd5f-4ed4-85a5-3e14888ede05 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Lock "4f108dcc-c130-4c3f-840d-7a912150db3f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 822.426860] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6b756e41-bd5f-4ed4-85a5-3e14888ede05 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Acquiring lock "4f108dcc-c130-4c3f-840d-7a912150db3f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 822.427176] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6b756e41-bd5f-4ed4-85a5-3e14888ede05 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Lock "4f108dcc-c130-4c3f-840d-7a912150db3f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 822.427390] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6b756e41-bd5f-4ed4-85a5-3e14888ede05 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Lock "4f108dcc-c130-4c3f-840d-7a912150db3f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 822.434505] env[63345]: INFO nova.compute.manager [None req-6b756e41-bd5f-4ed4-85a5-3e14888ede05 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 4f108dcc-c130-4c3f-840d-7a912150db3f] Terminating instance [ 822.449634] env[63345]: DEBUG nova.virt.hardware [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 822.450042] env[63345]: DEBUG nova.virt.hardware [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 822.450179] env[63345]: DEBUG nova.virt.hardware [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 822.450430] env[63345]: DEBUG nova.virt.hardware [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 822.450668] env[63345]: DEBUG nova.virt.hardware [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 822.450931] env[63345]: DEBUG nova.virt.hardware [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 822.451267] env[63345]: DEBUG nova.virt.hardware [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 
tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 822.451499] env[63345]: DEBUG nova.virt.hardware [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 822.451704] env[63345]: DEBUG nova.virt.hardware [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 822.451879] env[63345]: DEBUG nova.virt.hardware [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 822.452087] env[63345]: DEBUG nova.virt.hardware [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 822.453039] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ac5eab8-2f6f-4d5e-aba6-d108ec82d5aa {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.457224] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5077c400-6ef8-4b26-912d-964466ba2c45 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 822.466879] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fc63759a-f7d8-4711-8fbf-679f254d4dd7 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Lock "9aa651b8-317d-4153-8c33-9df0a5d16115" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 53.432s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 822.470035] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2700a1f-67c3-4c5a-8f4e-8d879f6f8b89 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.513775] env[63345]: INFO nova.compute.manager [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: a85688b0-d68f-4370-bd95-dc9fb1d2c26a] Took 45.18 seconds to build instance. 
[ 822.518946] env[63345]: DEBUG oslo_vmware.api [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Task: {'id': task-1017158, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072605} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.518946] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] [instance: 37f269fe-0266-4c03-9641-e6f43072657a] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 822.519648] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b044533-7bdd-4853-8218-b28bbd61e068 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.524143] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1125b816-eb22-46d7-bce9-e2884c5691f2 tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.513s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 822.537515] env[63345]: DEBUG oslo_concurrency.lockutils [None req-32e656c0-12e2-4aef-862e-6e9b19a4ed01 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.015s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 822.537830] env[63345]: DEBUG nova.objects.instance [None req-32e656c0-12e2-4aef-862e-6e9b19a4ed01 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Lazy-loading 'resources' on Instance uuid 02eb493e-d1a1-4461-8e3f-e493e96fe058 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 822.548569] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] [instance: 37f269fe-0266-4c03-9641-e6f43072657a] Reconfiguring VM instance instance-00000045 to attach disk [datastore2] 37f269fe-0266-4c03-9641-e6f43072657a/37f269fe-0266-4c03-9641-e6f43072657a.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 822.549670] env[63345]: DEBUG nova.network.neutron [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 822.553399] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f02007db-a855-4139-be35-625819c8208c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.572512] env[63345]: INFO nova.scheduler.client.report [None req-1125b816-eb22-46d7-bce9-e2884c5691f2 tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Deleted allocations for instance a9b69d13-6330-4f9b-b8e1-1c0017655f9f [ 822.582961] env[63345]: DEBUG oslo_vmware.api [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': task-1017159, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.584712] env[63345]: DEBUG oslo_vmware.api [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Waiting for the task: (returnval){ [ 822.584712] env[63345]: value = "task-1017160" [ 822.584712] env[63345]: _type = "Task" [ 822.584712] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.595091] env[63345]: DEBUG oslo_vmware.api [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Task: {'id': task-1017160, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.849715] env[63345]: DEBUG nova.network.neutron [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] [instance: 40d228ea-881e-4442-a16a-6758d061aa39] Successfully updated port: d58bb0b4-4fd1-4361-b242-9e186e278f08 {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 822.849715] env[63345]: DEBUG nova.network.neutron [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Updating instance_info_cache with network_info: [{"id": "07017fee-f295-4317-9453-e41726d715c5", "address": "fa:16:3e:d9:6b:c4", "network": {"id": "18b67684-3f06-4f15-be40-ba0b2769b248", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1680877425-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cb91ecf5d00e48dea9baf2122ac4fed7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "68add7d6-c025-46fa-84d3-9c589adb63e4", "external-id": "nsx-vlan-transportzone-961", "segmentation_id": 961, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap07017fee-f2", "ovs_interfaceid": 
"07017fee-f295-4317-9453-e41726d715c5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 822.941017] env[63345]: DEBUG nova.compute.manager [None req-6b756e41-bd5f-4ed4-85a5-3e14888ede05 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 4f108dcc-c130-4c3f-840d-7a912150db3f] Start destroying the instance on the hypervisor. {{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 822.941017] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-6b756e41-bd5f-4ed4-85a5-3e14888ede05 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 4f108dcc-c130-4c3f-840d-7a912150db3f] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 822.941017] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e202417-f838-4228-b769-98e6b09a9aa6 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.950119] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b756e41-bd5f-4ed4-85a5-3e14888ede05 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 4f108dcc-c130-4c3f-840d-7a912150db3f] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 822.950610] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1922ecf9-1ed1-47e2-92ab-aaeee729950a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.957923] env[63345]: DEBUG oslo_vmware.api [None req-6b756e41-bd5f-4ed4-85a5-3e14888ede05 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Waiting for the task: (returnval){ [ 822.957923] env[63345]: value = "task-1017161" [ 822.957923] env[63345]: _type = "Task" [ 822.957923] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.967518] env[63345]: DEBUG oslo_vmware.api [None req-6b756e41-bd5f-4ed4-85a5-3e14888ede05 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1017161, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.019630] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8aec1bb3-37ff-458a-a41c-88bca9521f21 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Lock "a85688b0-d68f-4370-bd95-dc9fb1d2c26a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 46.699s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 823.067021] env[63345]: DEBUG oslo_vmware.api [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': task-1017159, 'name': ReconfigVM_Task, 'duration_secs': 0.747711} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.067021] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: df2f06af-54a6-4dbd-83ff-1e4b066acbf3] Reconfigured VM instance instance-00000044 to attach disk [datastore2] df2f06af-54a6-4dbd-83ff-1e4b066acbf3/df2f06af-54a6-4dbd-83ff-1e4b066acbf3.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 823.067021] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5d104bf5-744b-49a7-ac34-6fc7322cf5a2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.072551] env[63345]: DEBUG oslo_vmware.api [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Waiting for the task: (returnval){ [ 823.072551] env[63345]: value = "task-1017162" [ 823.072551] env[63345]: _type = "Task" [ 823.072551] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.087028] env[63345]: DEBUG oslo_vmware.api [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': task-1017162, 'name': Rename_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.088074] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1125b816-eb22-46d7-bce9-e2884c5691f2 tempest-ServerMetadataNegativeTestJSON-2056845800 tempest-ServerMetadataNegativeTestJSON-2056845800-project-member] Lock "a9b69d13-6330-4f9b-b8e1-1c0017655f9f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.561s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 823.102198] env[63345]: DEBUG oslo_vmware.api [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Task: {'id': task-1017160, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.352463] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Acquiring lock "refresh_cache-40d228ea-881e-4442-a16a-6758d061aa39" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 823.352717] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Acquired lock "refresh_cache-40d228ea-881e-4442-a16a-6758d061aa39" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 823.352966] env[63345]: DEBUG nova.network.neutron [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] [instance: 40d228ea-881e-4442-a16a-6758d061aa39] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 823.355058] env[63345]: DEBUG oslo_concurrency.lockutils [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Releasing lock "refresh_cache-691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 823.355671] env[63345]: DEBUG nova.compute.manager [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Instance network_info: |[{"id": "07017fee-f295-4317-9453-e41726d715c5", "address": "fa:16:3e:d9:6b:c4", "network": {"id": "18b67684-3f06-4f15-be40-ba0b2769b248", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1680877425-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cb91ecf5d00e48dea9baf2122ac4fed7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "68add7d6-c025-46fa-84d3-9c589adb63e4", "external-id": "nsx-vlan-transportzone-961", "segmentation_id": 961, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap07017fee-f2", "ovs_interfaceid": "07017fee-f295-4317-9453-e41726d715c5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 823.357063] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d9:6b:c4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'68add7d6-c025-46fa-84d3-9c589adb63e4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '07017fee-f295-4317-9453-e41726d715c5', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 823.367903] env[63345]: DEBUG oslo.service.loopingcall [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 823.368961] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 823.369245] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4f050b34-171c-460a-ba25-f837c618e982 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.396221] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 823.396221] env[63345]: value = "task-1017163" [ 823.396221] env[63345]: _type = "Task" [ 823.396221] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.406400] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017163, 'name': CreateVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.470037] env[63345]: DEBUG oslo_vmware.api [None req-6b756e41-bd5f-4ed4-85a5-3e14888ede05 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1017161, 'name': PowerOffVM_Task, 'duration_secs': 0.209478} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.470335] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b756e41-bd5f-4ed4-85a5-3e14888ede05 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 4f108dcc-c130-4c3f-840d-7a912150db3f] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 823.470514] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-6b756e41-bd5f-4ed4-85a5-3e14888ede05 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 4f108dcc-c130-4c3f-840d-7a912150db3f] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 823.470771] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e155a1bc-0fd8-4437-9bb6-46f0c51fde19 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.723396] env[63345]: DEBUG oslo_vmware.api [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': task-1017162, 'name': Rename_Task, 'duration_secs': 0.209771} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.726887] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: df2f06af-54a6-4dbd-83ff-1e4b066acbf3] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 823.726887] env[63345]: DEBUG oslo_vmware.api [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Task: {'id': task-1017160, 'name': ReconfigVM_Task, 'duration_secs': 0.52978} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.726887] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-268a314e-be35-4bf4-91e6-4e47306dbdd1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.729824] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] [instance: 37f269fe-0266-4c03-9641-e6f43072657a] Reconfigured VM instance instance-00000045 to attach disk [datastore2] 37f269fe-0266-4c03-9641-e6f43072657a/37f269fe-0266-4c03-9641-e6f43072657a.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 823.730952] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a4eed14f-b755-4340-97c1-f4a479ba05e9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.746081] env[63345]: DEBUG oslo_vmware.api [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Waiting for the task: (returnval){ [ 823.746081] env[63345]: value = "task-1017165" [ 823.746081] env[63345]: _type = "Task" [ 823.746081] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.746799] env[63345]: DEBUG oslo_vmware.api [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Waiting for the task: (returnval){ [ 823.746799] env[63345]: value = "task-1017166" [ 823.746799] env[63345]: _type = "Task" [ 823.746799] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.771814] env[63345]: DEBUG oslo_vmware.api [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Task: {'id': task-1017166, 'name': Rename_Task} progress is 6%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.771814] env[63345]: DEBUG oslo_vmware.api [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': task-1017165, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.805088] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4819f2b9-218f-4fc5-951e-692a199c1de7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.814714] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-536c150f-cad3-4fba-ba09-c86009724800 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.855996] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56077b02-e7c4-46a0-875b-8e27cd8fa703 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.860683] env[63345]: DEBUG nova.compute.manager [req-eb4514b6-466a-4496-ac2a-3280039f4284 req-fbc10849-cdfe-45f1-8cd2-5ea60118e16b service nova] [instance: 40d228ea-881e-4442-a16a-6758d061aa39] Received event network-vif-plugged-d58bb0b4-4fd1-4361-b242-9e186e278f08 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 823.860741] env[63345]: DEBUG oslo_concurrency.lockutils [req-eb4514b6-466a-4496-ac2a-3280039f4284 req-fbc10849-cdfe-45f1-8cd2-5ea60118e16b service nova] Acquiring lock "40d228ea-881e-4442-a16a-6758d061aa39-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 823.861123] env[63345]: DEBUG oslo_concurrency.lockutils [req-eb4514b6-466a-4496-ac2a-3280039f4284 req-fbc10849-cdfe-45f1-8cd2-5ea60118e16b service nova] Lock "40d228ea-881e-4442-a16a-6758d061aa39-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 823.861427] env[63345]: DEBUG oslo_concurrency.lockutils [req-eb4514b6-466a-4496-ac2a-3280039f4284 req-fbc10849-cdfe-45f1-8cd2-5ea60118e16b service nova] Lock "40d228ea-881e-4442-a16a-6758d061aa39-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 823.861836] env[63345]: DEBUG nova.compute.manager [req-eb4514b6-466a-4496-ac2a-3280039f4284 req-fbc10849-cdfe-45f1-8cd2-5ea60118e16b service nova] [instance: 40d228ea-881e-4442-a16a-6758d061aa39] No waiting events found dispatching network-vif-plugged-d58bb0b4-4fd1-4361-b242-9e186e278f08 {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 823.862162] env[63345]: WARNING nova.compute.manager [req-eb4514b6-466a-4496-ac2a-3280039f4284 req-fbc10849-cdfe-45f1-8cd2-5ea60118e16b service nova] [instance: 40d228ea-881e-4442-a16a-6758d061aa39] Received unexpected event network-vif-plugged-d58bb0b4-4fd1-4361-b242-9e186e278f08 for instance with vm_state building and task_state spawning. 
[ 823.876436] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-611ddd49-8dc8-4d19-a2a1-fbafd43a68fa {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.887196] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-6b756e41-bd5f-4ed4-85a5-3e14888ede05 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 4f108dcc-c130-4c3f-840d-7a912150db3f] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 823.887536] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-6b756e41-bd5f-4ed4-85a5-3e14888ede05 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 4f108dcc-c130-4c3f-840d-7a912150db3f] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 823.887801] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b756e41-bd5f-4ed4-85a5-3e14888ede05 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Deleting the datastore file [datastore2] 4f108dcc-c130-4c3f-840d-7a912150db3f {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 823.888530] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fbdd8bd0-71a2-4c20-9b62-65396f85e97c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.899947] env[63345]: DEBUG nova.compute.provider_tree [None req-32e656c0-12e2-4aef-862e-6e9b19a4ed01 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 823.907735] env[63345]: DEBUG oslo_vmware.api [None req-6b756e41-bd5f-4ed4-85a5-3e14888ede05 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Waiting for the task: (returnval){ [ 823.907735] env[63345]: value = "task-1017167" [ 823.907735] env[63345]: _type = "Task" [ 823.907735] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.916143] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017163, 'name': CreateVM_Task} progress is 25%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.923459] env[63345]: DEBUG oslo_vmware.api [None req-6b756e41-bd5f-4ed4-85a5-3e14888ede05 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1017167, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.949786] env[63345]: DEBUG nova.network.neutron [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] [instance: 40d228ea-881e-4442-a16a-6758d061aa39] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 824.268397] env[63345]: DEBUG oslo_vmware.api [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': task-1017165, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.273470] env[63345]: DEBUG oslo_vmware.api [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Task: {'id': task-1017166, 'name': Rename_Task, 'duration_secs': 0.283345} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.274009] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] [instance: 37f269fe-0266-4c03-9641-e6f43072657a] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 824.274788] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1ec1d8a2-d093-4339-8472-98c57952d92a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.285901] env[63345]: DEBUG oslo_vmware.api [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Waiting for the task: (returnval){ [ 824.285901] env[63345]: value = "task-1017168" [ 824.285901] env[63345]: _type = "Task" [ 824.285901] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.304908] env[63345]: DEBUG oslo_vmware.api [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Task: {'id': task-1017168, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.345552] env[63345]: DEBUG nova.network.neutron [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] [instance: 40d228ea-881e-4442-a16a-6758d061aa39] Updating instance_info_cache with network_info: [{"id": "d58bb0b4-4fd1-4361-b242-9e186e278f08", "address": "fa:16:3e:9e:a3:48", "network": {"id": "18285fd9-d154-415c-acbb-1494303e3b6c", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.69", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "5dc99cc64e6c4d83928b309253a8df8d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8b99a46-3e7f-4ef1-9e45-58e6cd17f210", "external-id": "nsx-vlan-transportzone-704", "segmentation_id": 704, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd58bb0b4-4f", "ovs_interfaceid": "d58bb0b4-4fd1-4361-b242-9e186e278f08", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 824.406272] env[63345]: DEBUG nova.scheduler.client.report [None req-32e656c0-12e2-4aef-862e-6e9b19a4ed01 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 824.414111] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017163, 'name': CreateVM_Task, 'duration_secs': 0.679833} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.417564] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 824.418347] env[63345]: DEBUG oslo_concurrency.lockutils [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 824.418504] env[63345]: DEBUG oslo_concurrency.lockutils [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 824.418854] env[63345]: DEBUG oslo_concurrency.lockutils [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 824.419573] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f076f9a1-31f2-4eab-ac62-ad7439144d5d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.427471] env[63345]: DEBUG oslo_vmware.api [None req-6b756e41-bd5f-4ed4-85a5-3e14888ede05 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1017167, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.430806] env[63345]: DEBUG oslo_vmware.api [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 824.430806] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]5215d97f-f4e6-fab5-6b38-a90c0454fae8" [ 824.430806] env[63345]: _type = "Task" [ 824.430806] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.443481] env[63345]: DEBUG oslo_vmware.api [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5215d97f-f4e6-fab5-6b38-a90c0454fae8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.522686] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Acquiring lock "75fc8365-bf8d-489e-935f-a5169c6a7e62" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 824.522874] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Lock "75fc8365-bf8d-489e-935f-a5169c6a7e62" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 824.760376] env[63345]: DEBUG oslo_vmware.api [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': task-1017165, 'name': PowerOnVM_Task, 'duration_secs': 0.921883} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.760759] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: df2f06af-54a6-4dbd-83ff-1e4b066acbf3] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 824.761034] env[63345]: INFO nova.compute.manager [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: df2f06af-54a6-4dbd-83ff-1e4b066acbf3] Took 10.46 seconds to spawn the instance on the hypervisor. [ 824.761286] env[63345]: DEBUG nova.compute.manager [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: df2f06af-54a6-4dbd-83ff-1e4b066acbf3] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 824.762294] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f4f7889-2202-4ef9-9108-9825e66b2451 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.796921] env[63345]: DEBUG oslo_vmware.api [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Task: {'id': task-1017168, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.803362] env[63345]: DEBUG oslo_concurrency.lockutils [None req-324cad41-08d0-4374-8c43-f553c7ae1cfd tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Acquiring lock "bc9d2e6a-f77a-4a21-90bc-81949cbfce91" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 824.803669] env[63345]: DEBUG oslo_concurrency.lockutils [None req-324cad41-08d0-4374-8c43-f553c7ae1cfd tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Lock "bc9d2e6a-f77a-4a21-90bc-81949cbfce91" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 824.849016] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Releasing lock "refresh_cache-40d228ea-881e-4442-a16a-6758d061aa39" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 824.849347] env[63345]: DEBUG nova.compute.manager [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] [instance: 40d228ea-881e-4442-a16a-6758d061aa39] Instance network_info: |[{"id": "d58bb0b4-4fd1-4361-b242-9e186e278f08", "address": "fa:16:3e:9e:a3:48", "network": {"id": "18285fd9-d154-415c-acbb-1494303e3b6c", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.69", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "5dc99cc64e6c4d83928b309253a8df8d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8b99a46-3e7f-4ef1-9e45-58e6cd17f210", "external-id": "nsx-vlan-transportzone-704", "segmentation_id": 704, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd58bb0b4-4f", "ovs_interfaceid": "d58bb0b4-4fd1-4361-b242-9e186e278f08", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 824.849786] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] [instance: 40d228ea-881e-4442-a16a-6758d061aa39] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9e:a3:48', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a8b99a46-3e7f-4ef1-9e45-58e6cd17f210', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd58bb0b4-4fd1-4361-b242-9e186e278f08', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 
824.857611] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Creating folder: Project (ee96c4ba2a4e4a1fb9b903bed97d2500). Parent ref: group-v225918. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 824.857938] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9089c64f-cc86-41ac-8882-a0d0dd2ee60e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.873098] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Created folder: Project (ee96c4ba2a4e4a1fb9b903bed97d2500) in parent group-v225918. [ 824.873403] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Creating folder: Instances. Parent ref: group-v226054. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 824.873531] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8ada0ebe-5a6b-4d19-8daa-69a7531a38ad {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.885265] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Created folder: Instances in parent group-v226054. [ 824.885536] env[63345]: DEBUG oslo.service.loopingcall [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 824.885743] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 40d228ea-881e-4442-a16a-6758d061aa39] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 824.885992] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0d545ecf-ac36-4858-81f6-0be07f735f93 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.906293] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 824.906293] env[63345]: value = "task-1017171" [ 824.906293] env[63345]: _type = "Task" [ 824.906293] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.914514] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017171, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.918238] env[63345]: DEBUG oslo_concurrency.lockutils [None req-32e656c0-12e2-4aef-862e-6e9b19a4ed01 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.381s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 824.920602] env[63345]: DEBUG oslo_concurrency.lockutils [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.764s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 824.922275] env[63345]: INFO nova.compute.claims [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: dde93fd5-6312-4d91-b041-b7fc84b207d3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 824.930917] env[63345]: DEBUG oslo_vmware.api [None req-6b756e41-bd5f-4ed4-85a5-3e14888ede05 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1017167, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.736429} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.931731] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b756e41-bd5f-4ed4-85a5-3e14888ede05 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 824.931731] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-6b756e41-bd5f-4ed4-85a5-3e14888ede05 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 4f108dcc-c130-4c3f-840d-7a912150db3f] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 824.931862] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-6b756e41-bd5f-4ed4-85a5-3e14888ede05 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 4f108dcc-c130-4c3f-840d-7a912150db3f] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 824.931978] env[63345]: INFO nova.compute.manager [None req-6b756e41-bd5f-4ed4-85a5-3e14888ede05 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 4f108dcc-c130-4c3f-840d-7a912150db3f] Took 1.99 seconds to destroy the instance on the hypervisor. [ 824.932237] env[63345]: DEBUG oslo.service.loopingcall [None req-6b756e41-bd5f-4ed4-85a5-3e14888ede05 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 824.932519] env[63345]: DEBUG nova.compute.manager [-] [instance: 4f108dcc-c130-4c3f-840d-7a912150db3f] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 824.932615] env[63345]: DEBUG nova.network.neutron [-] [instance: 4f108dcc-c130-4c3f-840d-7a912150db3f] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 824.947317] env[63345]: DEBUG oslo_vmware.api [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5215d97f-f4e6-fab5-6b38-a90c0454fae8, 'name': SearchDatastore_Task, 'duration_secs': 0.011143} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.947634] env[63345]: DEBUG oslo_concurrency.lockutils [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 824.947882] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 824.948152] env[63345]: DEBUG oslo_concurrency.lockutils [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 824.948292] env[63345]: DEBUG oslo_concurrency.lockutils [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 824.948475] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 824.948749] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b09e5301-f75e-4fb2-8931-5b98e02860b3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.952792] env[63345]: INFO nova.scheduler.client.report [None req-32e656c0-12e2-4aef-862e-6e9b19a4ed01 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Deleted allocations for instance 
02eb493e-d1a1-4461-8e3f-e493e96fe058 [ 824.962914] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 824.963984] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 824.967790] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e4f4d4d7-984b-49b9-95c5-b91cf0fbb09d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.980739] env[63345]: DEBUG oslo_vmware.api [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 824.980739] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52f171cc-15a1-91e2-41b0-4fa5892de068" [ 824.980739] env[63345]: _type = "Task" [ 824.980739] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.993529] env[63345]: DEBUG oslo_vmware.api [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52f171cc-15a1-91e2-41b0-4fa5892de068, 'name': SearchDatastore_Task, 'duration_secs': 0.012368} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.994366] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-89dff374-ca2c-491a-8946-9e7bcef56fdb {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.001349] env[63345]: DEBUG oslo_vmware.api [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 825.001349] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]5270918f-22a0-085d-2b36-8c5a483e5286" [ 825.001349] env[63345]: _type = "Task" [ 825.001349] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.012361] env[63345]: DEBUG oslo_vmware.api [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5270918f-22a0-085d-2b36-8c5a483e5286, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.027244] env[63345]: DEBUG nova.compute.manager [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 75fc8365-bf8d-489e-935f-a5169c6a7e62] Starting instance... 
{{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 825.110952] env[63345]: DEBUG nova.compute.manager [req-d8d2b839-4bdf-494e-876b-83b40487b6d7 req-04b5995f-e87b-44fe-905e-69d7395b8138 service nova] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Received event network-changed-07017fee-f295-4317-9453-e41726d715c5 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 825.111280] env[63345]: DEBUG nova.compute.manager [req-d8d2b839-4bdf-494e-876b-83b40487b6d7 req-04b5995f-e87b-44fe-905e-69d7395b8138 service nova] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Refreshing instance network info cache due to event network-changed-07017fee-f295-4317-9453-e41726d715c5. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 825.111743] env[63345]: DEBUG oslo_concurrency.lockutils [req-d8d2b839-4bdf-494e-876b-83b40487b6d7 req-04b5995f-e87b-44fe-905e-69d7395b8138 service nova] Acquiring lock "refresh_cache-691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 825.112067] env[63345]: DEBUG oslo_concurrency.lockutils [req-d8d2b839-4bdf-494e-876b-83b40487b6d7 req-04b5995f-e87b-44fe-905e-69d7395b8138 service nova] Acquired lock "refresh_cache-691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 825.112433] env[63345]: DEBUG nova.network.neutron [req-d8d2b839-4bdf-494e-876b-83b40487b6d7 req-04b5995f-e87b-44fe-905e-69d7395b8138 service nova] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Refreshing network info cache for port 07017fee-f295-4317-9453-e41726d715c5 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 825.285237] env[63345]: INFO nova.compute.manager [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: df2f06af-54a6-4dbd-83ff-1e4b066acbf3] Took 43.48 seconds to build instance. [ 825.297303] env[63345]: DEBUG oslo_vmware.api [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Task: {'id': task-1017168, 'name': PowerOnVM_Task, 'duration_secs': 0.604679} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.297642] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] [instance: 37f269fe-0266-4c03-9641-e6f43072657a] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 825.297841] env[63345]: INFO nova.compute.manager [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] [instance: 37f269fe-0266-4c03-9641-e6f43072657a] Took 8.39 seconds to spawn the instance on the hypervisor. 
[ 825.298076] env[63345]: DEBUG nova.compute.manager [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] [instance: 37f269fe-0266-4c03-9641-e6f43072657a] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 825.298943] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a84fd5a-3be1-4c52-9b1d-5d94044a5cc6 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.307134] env[63345]: DEBUG nova.compute.utils [None req-324cad41-08d0-4374-8c43-f553c7ae1cfd tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 825.417807] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017171, 'name': CreateVM_Task} progress is 99%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.468553] env[63345]: DEBUG oslo_concurrency.lockutils [None req-32e656c0-12e2-4aef-862e-6e9b19a4ed01 tempest-ServerTagsTestJSON-977934610 tempest-ServerTagsTestJSON-977934610-project-member] Lock "02eb493e-d1a1-4461-8e3f-e493e96fe058" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.565s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 825.513586] env[63345]: DEBUG oslo_vmware.api [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5270918f-22a0-085d-2b36-8c5a483e5286, 'name': SearchDatastore_Task, 'duration_secs': 0.011526} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.513873] env[63345]: DEBUG oslo_concurrency.lockutils [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 825.514194] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f/691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 825.514458] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-042b3e2d-99ad-4fcb-8311-20064d343452 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.521565] env[63345]: DEBUG oslo_vmware.api [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 825.521565] env[63345]: value = "task-1017172" [ 825.521565] env[63345]: _type = "Task" [ 825.521565] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.533856] env[63345]: DEBUG oslo_vmware.api [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017172, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.553471] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 825.763751] env[63345]: DEBUG nova.network.neutron [-] [instance: 4f108dcc-c130-4c3f-840d-7a912150db3f] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 825.786835] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4859bd5a-c52a-4920-aa58-adf503feea6f tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Lock "df2f06af-54a6-4dbd-83ff-1e4b066acbf3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 44.994s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 825.810456] env[63345]: DEBUG oslo_concurrency.lockutils [None req-324cad41-08d0-4374-8c43-f553c7ae1cfd tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Lock "bc9d2e6a-f77a-4a21-90bc-81949cbfce91" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 825.828763] env[63345]: INFO nova.compute.manager [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] [instance: 37f269fe-0266-4c03-9641-e6f43072657a] Took 41.79 seconds to build instance. [ 825.907809] env[63345]: DEBUG nova.network.neutron [req-d8d2b839-4bdf-494e-876b-83b40487b6d7 req-04b5995f-e87b-44fe-905e-69d7395b8138 service nova] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Updated VIF entry in instance network info cache for port 07017fee-f295-4317-9453-e41726d715c5. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 825.908165] env[63345]: DEBUG nova.network.neutron [req-d8d2b839-4bdf-494e-876b-83b40487b6d7 req-04b5995f-e87b-44fe-905e-69d7395b8138 service nova] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Updating instance_info_cache with network_info: [{"id": "07017fee-f295-4317-9453-e41726d715c5", "address": "fa:16:3e:d9:6b:c4", "network": {"id": "18b67684-3f06-4f15-be40-ba0b2769b248", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1680877425-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cb91ecf5d00e48dea9baf2122ac4fed7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "68add7d6-c025-46fa-84d3-9c589adb63e4", "external-id": "nsx-vlan-transportzone-961", "segmentation_id": 961, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap07017fee-f2", "ovs_interfaceid": "07017fee-f295-4317-9453-e41726d715c5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 825.924910] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017171, 'name': CreateVM_Task, 'duration_secs': 0.568717} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.924910] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 40d228ea-881e-4442-a16a-6758d061aa39] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 825.925447] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 825.925700] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 825.926036] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 825.926330] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d04c2fdd-6c2e-461a-a38c-ed129262eae0 {{(pid=63345) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.932773] env[63345]: DEBUG oslo_vmware.api [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Waiting for the task: (returnval){ [ 825.932773] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]522f6cf4-d4fc-997e-b2c6-f1c97dad6dd8" [ 825.932773] env[63345]: _type = "Task" [ 825.932773] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.948977] env[63345]: DEBUG oslo_vmware.api [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]522f6cf4-d4fc-997e-b2c6-f1c97dad6dd8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.035885] env[63345]: DEBUG oslo_vmware.api [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017172, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.268409] env[63345]: INFO nova.compute.manager [-] [instance: 4f108dcc-c130-4c3f-840d-7a912150db3f] Took 1.34 seconds to deallocate network for instance. [ 826.324294] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-094d25ed-60a9-4b8a-99dc-013e7ed44e60 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.333395] env[63345]: DEBUG oslo_concurrency.lockutils [None req-55f12894-aad3-4952-a76b-ef4ad1914543 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Lock "37f269fe-0266-4c03-9641-e6f43072657a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 43.308s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 826.334814] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6012bc30-ee74-4dbb-bf68-e3cdbf6d8e4b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.366042] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ee05d32-75ec-480b-ac11-667eb04f1520 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.374729] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34c4967c-545a-4a6b-b3f0-a189dbc38ffd {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.392345] env[63345]: DEBUG nova.compute.provider_tree [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 826.411998] env[63345]: DEBUG 
oslo_concurrency.lockutils [req-d8d2b839-4bdf-494e-876b-83b40487b6d7 req-04b5995f-e87b-44fe-905e-69d7395b8138 service nova] Releasing lock "refresh_cache-691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 826.445769] env[63345]: DEBUG oslo_vmware.api [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]522f6cf4-d4fc-997e-b2c6-f1c97dad6dd8, 'name': SearchDatastore_Task, 'duration_secs': 0.060467} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.446114] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 826.446364] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] [instance: 40d228ea-881e-4442-a16a-6758d061aa39] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 826.446607] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 826.446759] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 826.446956] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 826.447309] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c0ba2bf6-9453-4933-9200-4d240c91c42e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.457182] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 826.458250] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 
tempest-ServerDiagnosticsTest-1401656714-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 826.458998] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0c4b9d84-8fa2-4d8e-a105-57119fc36348 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.465647] env[63345]: DEBUG oslo_vmware.api [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Waiting for the task: (returnval){ [ 826.465647] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52559745-ca51-59fb-6305-622b31256dcf" [ 826.465647] env[63345]: _type = "Task" [ 826.465647] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.479343] env[63345]: DEBUG oslo_vmware.api [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52559745-ca51-59fb-6305-622b31256dcf, 'name': SearchDatastore_Task, 'duration_secs': 0.011237} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.480162] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6a09ebb1-a1ce-44de-8a2e-abcf5fa6900b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.486214] env[63345]: DEBUG oslo_vmware.api [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Waiting for the task: (returnval){ [ 826.486214] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]526413bc-5446-0e67-5074-9a2e37d34e0f" [ 826.486214] env[63345]: _type = "Task" [ 826.486214] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.496215] env[63345]: DEBUG oslo_vmware.api [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]526413bc-5446-0e67-5074-9a2e37d34e0f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.514334] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Acquiring lock "5e20b33c-1481-4bd3-b269-29a70cc3150d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 826.514446] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Lock "5e20b33c-1481-4bd3-b269-29a70cc3150d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 826.535610] env[63345]: DEBUG oslo_vmware.api [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017172, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.557912} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.535966] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f/691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 826.536296] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 826.536625] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b06f4e7d-1d0a-4730-b986-0384e4c49e0f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.546312] env[63345]: DEBUG oslo_vmware.api [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 826.546312] env[63345]: value = "task-1017173" [ 826.546312] env[63345]: _type = "Task" [ 826.546312] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.556092] env[63345]: DEBUG oslo_vmware.api [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017173, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.654328] env[63345]: DEBUG nova.compute.manager [req-db438114-f6b3-4eb8-9f68-0a1f3cc6f7a7 req-322e8552-c74c-499b-aa4f-13cf487309f1 service nova] [instance: 40d228ea-881e-4442-a16a-6758d061aa39] Received event network-changed-d58bb0b4-4fd1-4361-b242-9e186e278f08 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 826.654577] env[63345]: DEBUG nova.compute.manager [req-db438114-f6b3-4eb8-9f68-0a1f3cc6f7a7 req-322e8552-c74c-499b-aa4f-13cf487309f1 service nova] [instance: 40d228ea-881e-4442-a16a-6758d061aa39] Refreshing instance network info cache due to event network-changed-d58bb0b4-4fd1-4361-b242-9e186e278f08. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 826.654804] env[63345]: DEBUG oslo_concurrency.lockutils [req-db438114-f6b3-4eb8-9f68-0a1f3cc6f7a7 req-322e8552-c74c-499b-aa4f-13cf487309f1 service nova] Acquiring lock "refresh_cache-40d228ea-881e-4442-a16a-6758d061aa39" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 826.654952] env[63345]: DEBUG oslo_concurrency.lockutils [req-db438114-f6b3-4eb8-9f68-0a1f3cc6f7a7 req-322e8552-c74c-499b-aa4f-13cf487309f1 service nova] Acquired lock "refresh_cache-40d228ea-881e-4442-a16a-6758d061aa39" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 826.655133] env[63345]: DEBUG nova.network.neutron [req-db438114-f6b3-4eb8-9f68-0a1f3cc6f7a7 req-322e8552-c74c-499b-aa4f-13cf487309f1 service nova] [instance: 40d228ea-881e-4442-a16a-6758d061aa39] Refreshing network info cache for port d58bb0b4-4fd1-4361-b242-9e186e278f08 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 826.779196] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6b756e41-bd5f-4ed4-85a5-3e14888ede05 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 826.894736] env[63345]: DEBUG nova.scheduler.client.report [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 826.907153] env[63345]: DEBUG oslo_concurrency.lockutils [None req-324cad41-08d0-4374-8c43-f553c7ae1cfd tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Acquiring lock "bc9d2e6a-f77a-4a21-90bc-81949cbfce91" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 826.907153] env[63345]: DEBUG oslo_concurrency.lockutils [None req-324cad41-08d0-4374-8c43-f553c7ae1cfd 
tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Lock "bc9d2e6a-f77a-4a21-90bc-81949cbfce91" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 826.908883] env[63345]: INFO nova.compute.manager [None req-324cad41-08d0-4374-8c43-f553c7ae1cfd tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: bc9d2e6a-f77a-4a21-90bc-81949cbfce91] Attaching volume ae2b09a5-682d-4ef7-9729-fbe017759426 to /dev/sdb [ 826.952230] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-576881e0-2f19-480f-834b-bdc798445c07 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.960246] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87440a32-f154-4458-b504-be1e73edca3c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.977277] env[63345]: DEBUG nova.virt.block_device [None req-324cad41-08d0-4374-8c43-f553c7ae1cfd tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: bc9d2e6a-f77a-4a21-90bc-81949cbfce91] Updating existing volume attachment record: 1be45ffc-3b6d-4cdf-b68f-85e6e4c5c831 {{(pid=63345) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 826.996415] env[63345]: DEBUG oslo_vmware.api [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]526413bc-5446-0e67-5074-9a2e37d34e0f, 'name': SearchDatastore_Task, 'duration_secs': 0.010601} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.996568] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 826.996832] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 40d228ea-881e-4442-a16a-6758d061aa39/40d228ea-881e-4442-a16a-6758d061aa39.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 826.997109] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0c2e312c-eccc-4a6a-9a7a-490808f93963 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.005290] env[63345]: DEBUG oslo_vmware.api [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Waiting for the task: (returnval){ [ 827.005290] env[63345]: value = "task-1017174" [ 827.005290] env[63345]: _type = "Task" [ 827.005290] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.018149] env[63345]: DEBUG oslo_vmware.api [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Task: {'id': task-1017174, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.018586] env[63345]: DEBUG nova.compute.manager [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 5e20b33c-1481-4bd3-b269-29a70cc3150d] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 827.057924] env[63345]: DEBUG oslo_vmware.api [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017173, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073539} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.057924] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 827.058794] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ccc0274-8dd3-489e-9a72-d0aedc301b08 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.083560] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Reconfiguring VM instance instance-00000046 to attach disk [datastore2] 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f/691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 827.084262] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1dfd6750-3a26-4c20-acec-5c284ccc0b14 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.105189] env[63345]: DEBUG oslo_vmware.api [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 827.105189] env[63345]: value = "task-1017175" [ 827.105189] env[63345]: _type = "Task" [ 827.105189] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.114497] env[63345]: DEBUG oslo_vmware.api [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017175, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.402305] env[63345]: DEBUG oslo_concurrency.lockutils [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.481s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 827.403103] env[63345]: DEBUG nova.compute.manager [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: dde93fd5-6312-4d91-b041-b7fc84b207d3] Start building networks asynchronously for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 827.408275] env[63345]: DEBUG oslo_concurrency.lockutils [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.706s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 827.411767] env[63345]: INFO nova.compute.claims [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: f37b4a95-0725-4a84-b726-fd4f26e87020] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 827.464319] env[63345]: DEBUG nova.compute.manager [req-455284ac-7081-4dc2-a198-a5b3a53e4657 req-8ddcfaa1-dbae-4fda-b925-9e875f4c05d1 service nova] [instance: 4f108dcc-c130-4c3f-840d-7a912150db3f] Received event network-vif-deleted-f14abadb-834f-4695-87e7-c79a8d8b328e {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 827.488945] env[63345]: DEBUG nova.network.neutron [req-db438114-f6b3-4eb8-9f68-0a1f3cc6f7a7 req-322e8552-c74c-499b-aa4f-13cf487309f1 service nova] [instance: 40d228ea-881e-4442-a16a-6758d061aa39] Updated VIF entry in instance network info cache for port d58bb0b4-4fd1-4361-b242-9e186e278f08. {{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 827.489594] env[63345]: DEBUG nova.network.neutron [req-db438114-f6b3-4eb8-9f68-0a1f3cc6f7a7 req-322e8552-c74c-499b-aa4f-13cf487309f1 service nova] [instance: 40d228ea-881e-4442-a16a-6758d061aa39] Updating instance_info_cache with network_info: [{"id": "d58bb0b4-4fd1-4361-b242-9e186e278f08", "address": "fa:16:3e:9e:a3:48", "network": {"id": "18285fd9-d154-415c-acbb-1494303e3b6c", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.69", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "5dc99cc64e6c4d83928b309253a8df8d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8b99a46-3e7f-4ef1-9e45-58e6cd17f210", "external-id": "nsx-vlan-transportzone-704", "segmentation_id": 704, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd58bb0b4-4f", "ovs_interfaceid": "d58bb0b4-4fd1-4361-b242-9e186e278f08", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 827.520050] env[63345]: DEBUG oslo_vmware.api [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Task: {'id': task-1017174, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.548236] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 827.618818] env[63345]: DEBUG oslo_vmware.api [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017175, 'name': ReconfigVM_Task, 'duration_secs': 0.508485} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.619498] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Reconfigured VM instance instance-00000046 to attach disk [datastore2] 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f/691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 827.621029] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-eb7df1c5-e6b8-4650-b6bf-2ccb36075a49 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.641534] env[63345]: DEBUG oslo_vmware.api [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 827.641534] env[63345]: value = "task-1017179" [ 827.641534] env[63345]: _type = "Task" [ 827.641534] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.653572] env[63345]: DEBUG oslo_vmware.api [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017179, 'name': Rename_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.917308] env[63345]: DEBUG nova.compute.utils [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 827.925507] env[63345]: DEBUG nova.compute.manager [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: dde93fd5-6312-4d91-b041-b7fc84b207d3] Allocating IP information in the background. 
{{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 827.925699] env[63345]: DEBUG nova.network.neutron [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: dde93fd5-6312-4d91-b041-b7fc84b207d3] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 827.994382] env[63345]: DEBUG oslo_concurrency.lockutils [req-db438114-f6b3-4eb8-9f68-0a1f3cc6f7a7 req-322e8552-c74c-499b-aa4f-13cf487309f1 service nova] Releasing lock "refresh_cache-40d228ea-881e-4442-a16a-6758d061aa39" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 827.994382] env[63345]: DEBUG nova.compute.manager [req-db438114-f6b3-4eb8-9f68-0a1f3cc6f7a7 req-322e8552-c74c-499b-aa4f-13cf487309f1 service nova] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Received event network-changed-025d1e18-19a3-43ce-9db9-1590137a5544 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 827.994382] env[63345]: DEBUG nova.compute.manager [req-db438114-f6b3-4eb8-9f68-0a1f3cc6f7a7 req-322e8552-c74c-499b-aa4f-13cf487309f1 service nova] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Refreshing instance network info cache due to event network-changed-025d1e18-19a3-43ce-9db9-1590137a5544. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 827.994382] env[63345]: DEBUG oslo_concurrency.lockutils [req-db438114-f6b3-4eb8-9f68-0a1f3cc6f7a7 req-322e8552-c74c-499b-aa4f-13cf487309f1 service nova] Acquiring lock "refresh_cache-9aa651b8-317d-4153-8c33-9df0a5d16115" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 827.994382] env[63345]: DEBUG oslo_concurrency.lockutils [req-db438114-f6b3-4eb8-9f68-0a1f3cc6f7a7 req-322e8552-c74c-499b-aa4f-13cf487309f1 service nova] Acquired lock "refresh_cache-9aa651b8-317d-4153-8c33-9df0a5d16115" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 827.994382] env[63345]: DEBUG nova.network.neutron [req-db438114-f6b3-4eb8-9f68-0a1f3cc6f7a7 req-322e8552-c74c-499b-aa4f-13cf487309f1 service nova] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Refreshing network info cache for port 025d1e18-19a3-43ce-9db9-1590137a5544 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 828.017999] env[63345]: DEBUG oslo_vmware.api [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Task: {'id': task-1017174, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.661001} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.021941] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 40d228ea-881e-4442-a16a-6758d061aa39/40d228ea-881e-4442-a16a-6758d061aa39.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 828.021941] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] [instance: 40d228ea-881e-4442-a16a-6758d061aa39] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 828.021941] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-eb5dc17b-7d8f-4ff2-9723-d044783b4807 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.027967] env[63345]: DEBUG oslo_vmware.api [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Waiting for the task: (returnval){ [ 828.027967] env[63345]: value = "task-1017180" [ 828.027967] env[63345]: _type = "Task" [ 828.027967] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.039531] env[63345]: DEBUG oslo_vmware.api [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Task: {'id': task-1017180, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.048166] env[63345]: DEBUG nova.policy [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f908b41c13824ddeb5fa5648e8750aa9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '34efcd7d600f49698c6619be002d838f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 828.153733] env[63345]: DEBUG oslo_vmware.api [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017179, 'name': Rename_Task, 'duration_secs': 0.323544} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.154432] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 828.154911] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4da32818-80a0-409e-92ae-3f612e9c1093 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.165203] env[63345]: DEBUG oslo_vmware.api [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 828.165203] env[63345]: value = "task-1017181" [ 828.165203] env[63345]: _type = "Task" [ 828.165203] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.174949] env[63345]: DEBUG oslo_vmware.api [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017181, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.179368] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4b6e5e86-a53e-4396-8dd3-5765b17272d1 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Acquiring lock "37f269fe-0266-4c03-9641-e6f43072657a" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 828.182166] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4b6e5e86-a53e-4396-8dd3-5765b17272d1 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Lock "37f269fe-0266-4c03-9641-e6f43072657a" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 828.182166] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4b6e5e86-a53e-4396-8dd3-5765b17272d1 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Acquiring lock "37f269fe-0266-4c03-9641-e6f43072657a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 828.182166] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4b6e5e86-a53e-4396-8dd3-5765b17272d1 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Lock "37f269fe-0266-4c03-9641-e6f43072657a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 828.182166] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4b6e5e86-a53e-4396-8dd3-5765b17272d1 tempest-ServerPasswordTestJSON-2056804140
tempest-ServerPasswordTestJSON-2056804140-project-member] Lock "37f269fe-0266-4c03-9641-e6f43072657a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 828.184211] env[63345]: INFO nova.compute.manager [None req-4b6e5e86-a53e-4396-8dd3-5765b17272d1 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] [instance: 37f269fe-0266-4c03-9641-e6f43072657a] Terminating instance [ 828.385133] env[63345]: DEBUG nova.compute.manager [None req-badec31a-3c15-4f7e-95ed-3050b7a3a354 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: df2f06af-54a6-4dbd-83ff-1e4b066acbf3] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 828.385133] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d0b8f43-b7ee-487a-986b-9537f4dd9015 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.428776] env[63345]: DEBUG nova.compute.manager [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: dde93fd5-6312-4d91-b041-b7fc84b207d3] Start building block device mappings for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 828.542714] env[63345]: DEBUG oslo_vmware.api [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Task: {'id': task-1017180, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079827} completed successfully.
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.543155] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] [instance: 40d228ea-881e-4442-a16a-6758d061aa39] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 828.544087] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bff0e3ad-aa61-41c2-accc-b709a3c0ff15 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.572087] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] [instance: 40d228ea-881e-4442-a16a-6758d061aa39] Reconfiguring VM instance instance-00000047 to attach disk [datastore2] 40d228ea-881e-4442-a16a-6758d061aa39/40d228ea-881e-4442-a16a-6758d061aa39.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 828.576840] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9cabce3a-54cc-441f-bda4-1c706ac1629c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.605358] env[63345]: DEBUG oslo_vmware.api [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Waiting for the task: (returnval){ [ 828.605358] env[63345]: value = "task-1017182" [ 828.605358] env[63345]: _type = "Task" [ 828.605358] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.616189] env[63345]: DEBUG oslo_vmware.api [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Task: {'id': task-1017182, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.679888] env[63345]: DEBUG oslo_vmware.api [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017181, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.688978] env[63345]: DEBUG nova.compute.manager [None req-4b6e5e86-a53e-4396-8dd3-5765b17272d1 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] [instance: 37f269fe-0266-4c03-9641-e6f43072657a] Start destroying the instance on the hypervisor. 
{{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 828.689309] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-4b6e5e86-a53e-4396-8dd3-5765b17272d1 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] [instance: 37f269fe-0266-4c03-9641-e6f43072657a] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 828.690437] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59d8f124-9674-4650-832b-9102dc7d5316 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.702498] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b6e5e86-a53e-4396-8dd3-5765b17272d1 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] [instance: 37f269fe-0266-4c03-9641-e6f43072657a] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 828.702775] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ec8a3291-ba93-4902-b306-7b59c7483add {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.706511] env[63345]: DEBUG nova.network.neutron [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: dde93fd5-6312-4d91-b041-b7fc84b207d3] Successfully created port: 56e0ca56-d9a3-439a-b072-ad4f8da026e8 {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 828.711047] env[63345]: DEBUG oslo_vmware.api [None req-4b6e5e86-a53e-4396-8dd3-5765b17272d1 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Waiting for the task: (returnval){ [ 828.711047] env[63345]: value = "task-1017183" [ 828.711047] env[63345]: _type = "Task" [ 828.711047] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.731532] env[63345]: DEBUG oslo_vmware.api [None req-4b6e5e86-a53e-4396-8dd3-5765b17272d1 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Task: {'id': task-1017183, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.859371] env[63345]: DEBUG nova.network.neutron [req-db438114-f6b3-4eb8-9f68-0a1f3cc6f7a7 req-322e8552-c74c-499b-aa4f-13cf487309f1 service nova] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Updated VIF entry in instance network info cache for port 025d1e18-19a3-43ce-9db9-1590137a5544. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 828.859756] env[63345]: DEBUG nova.network.neutron [req-db438114-f6b3-4eb8-9f68-0a1f3cc6f7a7 req-322e8552-c74c-499b-aa4f-13cf487309f1 service nova] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Updating instance_info_cache with network_info: [{"id": "025d1e18-19a3-43ce-9db9-1590137a5544", "address": "fa:16:3e:9b:36:a9", "network": {"id": "95d95c9b-b21c-4ee5-ab54-d0bf2699d38e", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-88421441-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba08f64c26d245a8b8f2b52ea97c2f1a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7043ca7a-807c-4c7b-b646-23ffece188b2", "external-id": "nsx-vlan-transportzone-619", "segmentation_id": 619, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap025d1e18-19", "ovs_interfaceid": "025d1e18-19a3-43ce-9db9-1590137a5544", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 828.897675] env[63345]: INFO nova.compute.manager [None req-badec31a-3c15-4f7e-95ed-3050b7a3a354 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: df2f06af-54a6-4dbd-83ff-1e4b066acbf3] instance snapshotting [ 828.901969] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e344b61-684e-42eb-9e9f-f08edd226acb {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.923494] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0954e633-6d3a-4bb0-9c9d-c63f3e4e122d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.957754] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96e51596-ea71-4ac1-89b4-2fc9888c0eba {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.966331] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71216336-a74c-42ba-93dd-cae6c4c866e8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.002579] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05e8871e-76b3-4acb-b3f1-bbe4f2ffa8ff {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.013719] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a705a19f-4e2a-4729-bf1a-5595174fbd7d {{(pid=63345) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.030228] env[63345]: DEBUG nova.compute.provider_tree [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 829.116120] env[63345]: DEBUG oslo_vmware.api [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Task: {'id': task-1017182, 'name': ReconfigVM_Task, 'duration_secs': 0.457545} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.116368] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] [instance: 40d228ea-881e-4442-a16a-6758d061aa39] Reconfigured VM instance instance-00000047 to attach disk [datastore2] 40d228ea-881e-4442-a16a-6758d061aa39/40d228ea-881e-4442-a16a-6758d061aa39.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 829.117028] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c7ee9ef8-0467-4068-b709-5b4c378dbeca {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.123893] env[63345]: DEBUG oslo_vmware.api [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Waiting for the task: (returnval){ [ 829.123893] env[63345]: value = "task-1017184" [ 829.123893] env[63345]: _type = "Task" [ 829.123893] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.133599] env[63345]: DEBUG oslo_vmware.api [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Task: {'id': task-1017184, 'name': Rename_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.175405] env[63345]: DEBUG oslo_vmware.api [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017181, 'name': PowerOnVM_Task} progress is 94%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.227385] env[63345]: DEBUG oslo_vmware.api [None req-4b6e5e86-a53e-4396-8dd3-5765b17272d1 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Task: {'id': task-1017183, 'name': PowerOffVM_Task, 'duration_secs': 0.219094} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.227797] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b6e5e86-a53e-4396-8dd3-5765b17272d1 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] [instance: 37f269fe-0266-4c03-9641-e6f43072657a] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 829.228088] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-4b6e5e86-a53e-4396-8dd3-5765b17272d1 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] [instance: 37f269fe-0266-4c03-9641-e6f43072657a] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 829.228384] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3265beff-9753-442e-b4df-2151a534d098 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.325649] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-4b6e5e86-a53e-4396-8dd3-5765b17272d1 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] [instance: 37f269fe-0266-4c03-9641-e6f43072657a] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 829.325832] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-4b6e5e86-a53e-4396-8dd3-5765b17272d1 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] [instance: 37f269fe-0266-4c03-9641-e6f43072657a] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 829.326095] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b6e5e86-a53e-4396-8dd3-5765b17272d1 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Deleting the datastore file [datastore2] 37f269fe-0266-4c03-9641-e6f43072657a {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 829.326461] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-04447c72-0d96-4f10-8ac2-f883e64fb0be {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.335497] env[63345]: DEBUG oslo_vmware.api [None req-4b6e5e86-a53e-4396-8dd3-5765b17272d1 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Waiting for the task: (returnval){ [ 829.335497] env[63345]: value = "task-1017186" [ 829.335497] env[63345]: _type = "Task" [ 829.335497] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.347534] env[63345]: DEBUG oslo_vmware.api [None req-4b6e5e86-a53e-4396-8dd3-5765b17272d1 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Task: {'id': task-1017186, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.364247] env[63345]: DEBUG oslo_concurrency.lockutils [req-db438114-f6b3-4eb8-9f68-0a1f3cc6f7a7 req-322e8552-c74c-499b-aa4f-13cf487309f1 service nova] Releasing lock "refresh_cache-9aa651b8-317d-4153-8c33-9df0a5d16115" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 829.439931] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-badec31a-3c15-4f7e-95ed-3050b7a3a354 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: df2f06af-54a6-4dbd-83ff-1e4b066acbf3] Creating Snapshot of the VM instance {{(pid=63345) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 829.440910] env[63345]: DEBUG nova.compute.manager [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: dde93fd5-6312-4d91-b041-b7fc84b207d3] Start spawning the instance on the hypervisor. {{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 829.443387] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-be6db9b4-2738-45b1-90f0-1754b60883c4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.452763] env[63345]: DEBUG oslo_vmware.api [None req-badec31a-3c15-4f7e-95ed-3050b7a3a354 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Waiting for the task: (returnval){ [ 829.452763] env[63345]: value = "task-1017188" [ 829.452763] env[63345]: _type = "Task" [ 829.452763] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.462206] env[63345]: DEBUG oslo_vmware.api [None req-badec31a-3c15-4f7e-95ed-3050b7a3a354 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': task-1017188, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.481424] env[63345]: DEBUG nova.virt.hardware [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=<?>,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-09-30T09:32:21Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 829.481755] env[63345]: DEBUG nova.virt.hardware [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 829.481945] env[63345]: DEBUG nova.virt.hardware [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 829.482390] env[63345]: DEBUG nova.virt.hardware [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 829.482658] env[63345]: DEBUG nova.virt.hardware [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 829.482968] env[63345]: DEBUG nova.virt.hardware [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 829.483375] env[63345]: DEBUG nova.virt.hardware [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 829.483636] env[63345]: DEBUG nova.virt.hardware [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 829.483879] env[63345]: DEBUG nova.virt.hardware [None
req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 829.484158] env[63345]: DEBUG nova.virt.hardware [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 829.484420] env[63345]: DEBUG nova.virt.hardware [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 829.485523] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b56e1f02-2954-48cf-984b-d70e012e1639 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.495459] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36dd48b8-2b96-4cd3-87b6-2898af71200c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.534117] env[63345]: DEBUG nova.scheduler.client.report [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 829.640663] env[63345]: DEBUG oslo_vmware.api [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Task: {'id': task-1017184, 'name': Rename_Task, 'duration_secs': 0.234005} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.640951] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] [instance: 40d228ea-881e-4442-a16a-6758d061aa39] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 829.641243] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1be4e418-7448-4c95-af7b-0bc52f211319 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.649360] env[63345]: DEBUG oslo_vmware.api [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Waiting for the task: (returnval){ [ 829.649360] env[63345]: value = "task-1017189" [ 829.649360] env[63345]: _type = "Task" [ 829.649360] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.657216] env[63345]: DEBUG oslo_vmware.api [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Task: {'id': task-1017189, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.674069] env[63345]: DEBUG oslo_vmware.api [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017181, 'name': PowerOnVM_Task, 'duration_secs': 1.169356} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.674381] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 829.674602] env[63345]: INFO nova.compute.manager [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Took 10.19 seconds to spawn the instance on the hypervisor. [ 829.674789] env[63345]: DEBUG nova.compute.manager [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 829.675605] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10b4d6f2-3a41-4460-85a4-1689fc85d709 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.846976] env[63345]: DEBUG oslo_vmware.api [None req-4b6e5e86-a53e-4396-8dd3-5765b17272d1 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Task: {'id': task-1017186, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.250826} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.847285] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b6e5e86-a53e-4396-8dd3-5765b17272d1 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 829.847478] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-4b6e5e86-a53e-4396-8dd3-5765b17272d1 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] [instance: 37f269fe-0266-4c03-9641-e6f43072657a] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 829.847662] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-4b6e5e86-a53e-4396-8dd3-5765b17272d1 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] [instance: 37f269fe-0266-4c03-9641-e6f43072657a] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 829.847843] env[63345]: INFO nova.compute.manager [None req-4b6e5e86-a53e-4396-8dd3-5765b17272d1 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] [instance: 37f269fe-0266-4c03-9641-e6f43072657a] Took 1.16 seconds to destroy the instance on the hypervisor. [ 829.848106] env[63345]: DEBUG oslo.service.loopingcall [None req-4b6e5e86-a53e-4396-8dd3-5765b17272d1 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 829.848336] env[63345]: DEBUG nova.compute.manager [-] [instance: 37f269fe-0266-4c03-9641-e6f43072657a] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 829.848438] env[63345]: DEBUG nova.network.neutron [-] [instance: 37f269fe-0266-4c03-9641-e6f43072657a] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 829.967647] env[63345]: DEBUG oslo_vmware.api [None req-badec31a-3c15-4f7e-95ed-3050b7a3a354 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': task-1017188, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.039256] env[63345]: DEBUG oslo_concurrency.lockutils [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.631s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 830.039887] env[63345]: DEBUG nova.compute.manager [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: f37b4a95-0725-4a84-b726-fd4f26e87020] Start building networks asynchronously for instance.
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 830.043577] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8479a8c2-85cb-4f55-89bb-1d6962627a93 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.310s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 830.043899] env[63345]: DEBUG nova.objects.instance [None req-8479a8c2-85cb-4f55-89bb-1d6962627a93 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Lazy-loading 'resources' on Instance uuid 85fb1ecd-4ca3-401d-a87a-131f0b275506 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 830.160436] env[63345]: DEBUG oslo_vmware.api [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Task: {'id': task-1017189, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.182876] env[63345]: DEBUG nova.compute.manager [req-d333f9b1-6aed-44e0-8eb3-b01d8243c2cd req-be2235fe-c5b5-4d9b-a41a-95075ec95a7f service nova] [instance: 37f269fe-0266-4c03-9641-e6f43072657a] Received event network-vif-deleted-af759cda-f432-45a8-afdd-ead0d3533779 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 830.183173] env[63345]: INFO nova.compute.manager [req-d333f9b1-6aed-44e0-8eb3-b01d8243c2cd req-be2235fe-c5b5-4d9b-a41a-95075ec95a7f service nova] [instance: 37f269fe-0266-4c03-9641-e6f43072657a] Neutron deleted interface af759cda-f432-45a8-afdd-ead0d3533779; detaching it from the instance and deleting it from the info cache [ 830.183286] env[63345]: DEBUG nova.network.neutron [req-d333f9b1-6aed-44e0-8eb3-b01d8243c2cd req-be2235fe-c5b5-4d9b-a41a-95075ec95a7f service nova] [instance: 37f269fe-0266-4c03-9641-e6f43072657a] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 830.196246] env[63345]: INFO nova.compute.manager [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Took 44.74 seconds to build instance. [ 830.464949] env[63345]: DEBUG oslo_vmware.api [None req-badec31a-3c15-4f7e-95ed-3050b7a3a354 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': task-1017188, 'name': CreateSnapshot_Task, 'duration_secs': 0.984889} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.465468] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-badec31a-3c15-4f7e-95ed-3050b7a3a354 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: df2f06af-54a6-4dbd-83ff-1e4b066acbf3] Created Snapshot of the VM instance {{(pid=63345) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 830.466244] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15b5b3c8-42cd-4942-8a8b-d0aa5a81e52e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.547383] env[63345]: DEBUG nova.compute.utils [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 830.553159] env[63345]: DEBUG nova.compute.manager [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: f37b4a95-0725-4a84-b726-fd4f26e87020] Allocating IP information in the background. {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 830.553159] env[63345]: DEBUG nova.network.neutron [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: f37b4a95-0725-4a84-b726-fd4f26e87020] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 830.596510] env[63345]: DEBUG nova.policy [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fce058d27d8e4da19af436b282b37f32', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '63d7b3facae6416989f763e610cf98f7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 830.641182] env[63345]: DEBUG nova.network.neutron [-] [instance: 37f269fe-0266-4c03-9641-e6f43072657a] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 830.662305] env[63345]: DEBUG oslo_vmware.api [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Task: {'id': task-1017189, 'name': PowerOnVM_Task, 'duration_secs': 0.795146} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.665468] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] [instance: 40d228ea-881e-4442-a16a-6758d061aa39] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 830.665468] env[63345]: INFO nova.compute.manager [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] [instance: 40d228ea-881e-4442-a16a-6758d061aa39] Took 8.24 seconds to spawn the instance on the hypervisor. [ 830.666187] env[63345]: DEBUG nova.compute.manager [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] [instance: 40d228ea-881e-4442-a16a-6758d061aa39] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 830.667631] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c454ccb-51c6-4fdc-be60-3838f5dd5c2a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.691463] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6880f5b4-6a99-4c7a-a043-f21501e25128 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.698544] env[63345]: DEBUG oslo_concurrency.lockutils [None req-afe72f1d-f70f-46d3-90e6-08e9930ca46e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lock "691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 46.462s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 830.703193] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-060bcc07-5a5b-48fe-9eb5-ddaf8f589899 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.745850] env[63345]: DEBUG nova.compute.manager [req-d333f9b1-6aed-44e0-8eb3-b01d8243c2cd req-be2235fe-c5b5-4d9b-a41a-95075ec95a7f service nova] [instance: 37f269fe-0266-4c03-9641-e6f43072657a] Detach interface failed, port_id=af759cda-f432-45a8-afdd-ead0d3533779, reason: Instance 37f269fe-0266-4c03-9641-e6f43072657a could not be found. 
{{(pid=63345) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 830.871587] env[63345]: DEBUG nova.network.neutron [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: f37b4a95-0725-4a84-b726-fd4f26e87020] Successfully created port: a8479a06-71fa-42d6-a093-13fcbbae3778 {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 830.966202] env[63345]: DEBUG nova.network.neutron [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: dde93fd5-6312-4d91-b041-b7fc84b207d3] Successfully updated port: 56e0ca56-d9a3-439a-b072-ad4f8da026e8 {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 830.988936] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-badec31a-3c15-4f7e-95ed-3050b7a3a354 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: df2f06af-54a6-4dbd-83ff-1e4b066acbf3] Creating linked-clone VM from snapshot {{(pid=63345) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 830.993709] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-967a5307-af5a-45af-b0ef-05c48db8dfb2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.008119] env[63345]: DEBUG nova.compute.manager [req-10f3572d-b0bc-4ec9-861f-39b3c045eba5 req-2ceb86e3-9c31-46a8-8c49-9306400933e0 service nova] [instance: dde93fd5-6312-4d91-b041-b7fc84b207d3] Received event network-vif-plugged-56e0ca56-d9a3-439a-b072-ad4f8da026e8 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 831.008119] env[63345]: DEBUG oslo_concurrency.lockutils [req-10f3572d-b0bc-4ec9-861f-39b3c045eba5 req-2ceb86e3-9c31-46a8-8c49-9306400933e0 service nova] Acquiring lock "dde93fd5-6312-4d91-b041-b7fc84b207d3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 831.008792] env[63345]: DEBUG oslo_concurrency.lockutils [req-10f3572d-b0bc-4ec9-861f-39b3c045eba5 req-2ceb86e3-9c31-46a8-8c49-9306400933e0 service nova] Lock "dde93fd5-6312-4d91-b041-b7fc84b207d3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 831.008792] env[63345]: DEBUG oslo_concurrency.lockutils [req-10f3572d-b0bc-4ec9-861f-39b3c045eba5 req-2ceb86e3-9c31-46a8-8c49-9306400933e0 service nova] Lock "dde93fd5-6312-4d91-b041-b7fc84b207d3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 831.008792] env[63345]: DEBUG nova.compute.manager [req-10f3572d-b0bc-4ec9-861f-39b3c045eba5 req-2ceb86e3-9c31-46a8-8c49-9306400933e0 service nova] [instance: dde93fd5-6312-4d91-b041-b7fc84b207d3] No waiting events found dispatching network-vif-plugged-56e0ca56-d9a3-439a-b072-ad4f8da026e8 {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 831.009218] env[63345]: WARNING nova.compute.manager 
[req-10f3572d-b0bc-4ec9-861f-39b3c045eba5 req-2ceb86e3-9c31-46a8-8c49-9306400933e0 service nova] [instance: dde93fd5-6312-4d91-b041-b7fc84b207d3] Received unexpected event network-vif-plugged-56e0ca56-d9a3-439a-b072-ad4f8da026e8 for instance with vm_state building and task_state spawning. [ 831.011194] env[63345]: DEBUG oslo_vmware.api [None req-badec31a-3c15-4f7e-95ed-3050b7a3a354 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Waiting for the task: (returnval){ [ 831.011194] env[63345]: value = "task-1017190" [ 831.011194] env[63345]: _type = "Task" [ 831.011194] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.025897] env[63345]: DEBUG oslo_vmware.api [None req-badec31a-3c15-4f7e-95ed-3050b7a3a354 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': task-1017190, 'name': CloneVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.056679] env[63345]: DEBUG nova.compute.manager [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: f37b4a95-0725-4a84-b726-fd4f26e87020] Start building block device mappings for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 831.093969] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df0c0313-6b0d-4881-81d9-51529c063dea {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.104488] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d66809c-271c-4f25-9299-406012b27a6b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.141755] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf23d8a3-ae09-40e6-a79f-248c85c5f428 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.146693] env[63345]: INFO nova.compute.manager [-] [instance: 37f269fe-0266-4c03-9641-e6f43072657a] Took 1.30 seconds to deallocate network for instance. [ 831.156176] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa0a5e7d-a9c7-4751-afff-33744df29f22 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.177018] env[63345]: DEBUG nova.compute.provider_tree [None req-8479a8c2-85cb-4f55-89bb-1d6962627a93 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 831.189165] env[63345]: INFO nova.compute.manager [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] [instance: 40d228ea-881e-4442-a16a-6758d061aa39] Took 36.03 seconds to build instance. 
[ 831.469406] env[63345]: DEBUG oslo_concurrency.lockutils [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Acquiring lock "refresh_cache-dde93fd5-6312-4d91-b041-b7fc84b207d3" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 831.469667] env[63345]: DEBUG oslo_concurrency.lockutils [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Acquired lock "refresh_cache-dde93fd5-6312-4d91-b041-b7fc84b207d3" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 831.469825] env[63345]: DEBUG nova.network.neutron [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: dde93fd5-6312-4d91-b041-b7fc84b207d3] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 831.522658] env[63345]: DEBUG oslo_vmware.api [None req-badec31a-3c15-4f7e-95ed-3050b7a3a354 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': task-1017190, 'name': CloneVM_Task} progress is 94%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.548315] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-324cad41-08d0-4374-8c43-f553c7ae1cfd tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: bc9d2e6a-f77a-4a21-90bc-81949cbfce91] Volume attach. Driver type: vmdk {{(pid=63345) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 831.548315] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-324cad41-08d0-4374-8c43-f553c7ae1cfd tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: bc9d2e6a-f77a-4a21-90bc-81949cbfce91] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-226058', 'volume_id': 'ae2b09a5-682d-4ef7-9729-fbe017759426', 'name': 'volume-ae2b09a5-682d-4ef7-9729-fbe017759426', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'bc9d2e6a-f77a-4a21-90bc-81949cbfce91', 'attached_at': '', 'detached_at': '', 'volume_id': 'ae2b09a5-682d-4ef7-9729-fbe017759426', 'serial': 'ae2b09a5-682d-4ef7-9729-fbe017759426'} {{(pid=63345) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 831.549697] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e33901f-704a-4c14-b7b3-fa2d8074e679 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.573685] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-054143bd-be12-46bd-a555-be1464a3301a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.604271] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-324cad41-08d0-4374-8c43-f553c7ae1cfd tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: bc9d2e6a-f77a-4a21-90bc-81949cbfce91] Reconfiguring 
VM instance instance-0000001c to attach disk [datastore2] volume-ae2b09a5-682d-4ef7-9729-fbe017759426/volume-ae2b09a5-682d-4ef7-9729-fbe017759426.vmdk or device None with type thin {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 831.604648] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-affd6f4b-6a1c-4bc4-a2fe-effc5766d696 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.625200] env[63345]: DEBUG oslo_vmware.api [None req-324cad41-08d0-4374-8c43-f553c7ae1cfd tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Waiting for the task: (returnval){ [ 831.625200] env[63345]: value = "task-1017191" [ 831.625200] env[63345]: _type = "Task" [ 831.625200] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.639057] env[63345]: DEBUG oslo_vmware.api [None req-324cad41-08d0-4374-8c43-f553c7ae1cfd tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Task: {'id': task-1017191, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.663885] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4b6e5e86-a53e-4396-8dd3-5765b17272d1 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 831.678452] env[63345]: DEBUG nova.scheduler.client.report [None req-8479a8c2-85cb-4f55-89bb-1d6962627a93 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 831.691545] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fc949521-dd05-44c3-a207-b3374d8be8b8 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Lock "40d228ea-881e-4442-a16a-6758d061aa39" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.546s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 831.836468] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a4317511-468d-4a93-8b92-7812b3bbd0dc tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Acquiring lock "cb712d80-be78-4c19-a891-329011521f30" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 831.836588] env[63345]: DEBUG oslo_concurrency.lockutils [None 
req-a4317511-468d-4a93-8b92-7812b3bbd0dc tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Lock "cb712d80-be78-4c19-a891-329011521f30" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 831.836956] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a4317511-468d-4a93-8b92-7812b3bbd0dc tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Acquiring lock "cb712d80-be78-4c19-a891-329011521f30-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 831.836956] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a4317511-468d-4a93-8b92-7812b3bbd0dc tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Lock "cb712d80-be78-4c19-a891-329011521f30-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 831.837193] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a4317511-468d-4a93-8b92-7812b3bbd0dc tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Lock "cb712d80-be78-4c19-a891-329011521f30-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 831.839672] env[63345]: INFO nova.compute.manager [None req-a4317511-468d-4a93-8b92-7812b3bbd0dc tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] [instance: cb712d80-be78-4c19-a891-329011521f30] Terminating instance [ 831.973635] env[63345]: DEBUG nova.compute.manager [None req-3ebd7e36-4bde-40a5-96ff-69db9844151d tempest-ServerDiagnosticsTest-1748031819 tempest-ServerDiagnosticsTest-1748031819-project-admin] [instance: 40d228ea-881e-4442-a16a-6758d061aa39] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 831.980231] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1eb9911f-09cc-479c-a2af-37f7c80d70e8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.989574] env[63345]: INFO nova.compute.manager [None req-3ebd7e36-4bde-40a5-96ff-69db9844151d tempest-ServerDiagnosticsTest-1748031819 tempest-ServerDiagnosticsTest-1748031819-project-admin] [instance: 40d228ea-881e-4442-a16a-6758d061aa39] Retrieving diagnostics [ 831.990479] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a37acf0c-517c-46ad-bd27-8c4c8c4a65e9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.023826] env[63345]: DEBUG nova.network.neutron [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: dde93fd5-6312-4d91-b041-b7fc84b207d3] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 832.037223] env[63345]: DEBUG oslo_vmware.api [None req-badec31a-3c15-4f7e-95ed-3050b7a3a354 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': task-1017190, 'name': CloneVM_Task} progress is 94%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.071232] env[63345]: DEBUG nova.compute.manager [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: f37b4a95-0725-4a84-b726-fd4f26e87020] Start spawning the instance on the hypervisor. {{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 832.099943] env[63345]: DEBUG nova.virt.hardware [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 832.100233] env[63345]: DEBUG nova.virt.hardware [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 832.100404] env[63345]: DEBUG nova.virt.hardware [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 832.100594] env[63345]: DEBUG nova.virt.hardware [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 832.100749] env[63345]: DEBUG nova.virt.hardware [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 832.101025] env[63345]: DEBUG nova.virt.hardware [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 832.101122] env[63345]: DEBUG nova.virt.hardware [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 
tempest-ServersTestJSON-216022561-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 832.101292] env[63345]: DEBUG nova.virt.hardware [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 832.101467] env[63345]: DEBUG nova.virt.hardware [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 832.101633] env[63345]: DEBUG nova.virt.hardware [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 832.101847] env[63345]: DEBUG nova.virt.hardware [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 832.102829] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e75e291-eebe-4206-91ca-27a4fa1f21e7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.114228] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83dfc467-012f-4c3e-8d8b-96c4150e195a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.136654] env[63345]: DEBUG oslo_vmware.api [None req-324cad41-08d0-4374-8c43-f553c7ae1cfd tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Task: {'id': task-1017191, 'name': ReconfigVM_Task, 'duration_secs': 0.394333} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.136989] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-324cad41-08d0-4374-8c43-f553c7ae1cfd tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: bc9d2e6a-f77a-4a21-90bc-81949cbfce91] Reconfigured VM instance instance-0000001c to attach disk [datastore2] volume-ae2b09a5-682d-4ef7-9729-fbe017759426/volume-ae2b09a5-682d-4ef7-9729-fbe017759426.vmdk or device None with type thin {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 832.142353] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6c6aca53-c115-41f1-aa88-922b8f6c5844 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.163438] env[63345]: DEBUG oslo_vmware.api [None req-324cad41-08d0-4374-8c43-f553c7ae1cfd tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Waiting for the task: (returnval){ [ 832.163438] env[63345]: value = "task-1017192" [ 832.163438] env[63345]: _type = "Task" [ 832.163438] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.178765] env[63345]: DEBUG oslo_vmware.api [None req-324cad41-08d0-4374-8c43-f553c7ae1cfd tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Task: {'id': task-1017192, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.184782] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8479a8c2-85cb-4f55-89bb-1d6962627a93 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.141s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 832.187335] env[63345]: DEBUG oslo_concurrency.lockutils [None req-128f3b73-bb07-41af-8973-c75285c7dc1a tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.309s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 832.187591] env[63345]: DEBUG nova.objects.instance [None req-128f3b73-bb07-41af-8973-c75285c7dc1a tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Lazy-loading 'resources' on Instance uuid fe3e2b2a-9583-482e-b69b-6c130801d7db {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 832.206615] env[63345]: DEBUG nova.network.neutron [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: dde93fd5-6312-4d91-b041-b7fc84b207d3] Updating instance_info_cache with network_info: [{"id": "56e0ca56-d9a3-439a-b072-ad4f8da026e8", "address": "fa:16:3e:0e:39:69", "network": {"id": "dc725254-60a8-4edc-aab2-604dfb70677d", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1100061234-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", 
"type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "34efcd7d600f49698c6619be002d838f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b00fe87c-d828-442f-bd09-e9018c468557", "external-id": "nsx-vlan-transportzone-7", "segmentation_id": 7, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap56e0ca56-d9", "ovs_interfaceid": "56e0ca56-d9a3-439a-b072-ad4f8da026e8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 832.211237] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0d78bf12-3c5c-471b-b2c2-90e73c9dea51 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquiring lock "691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 832.211669] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0d78bf12-3c5c-471b-b2c2-90e73c9dea51 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lock "691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 832.212022] env[63345]: DEBUG nova.compute.manager [None req-0d78bf12-3c5c-471b-b2c2-90e73c9dea51 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 832.213908] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-195779c6-b7c8-480d-b7ea-dfdf4c528ccf {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.222276] env[63345]: DEBUG nova.compute.manager [None req-0d78bf12-3c5c-471b-b2c2-90e73c9dea51 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63345) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3403}} [ 832.222985] env[63345]: DEBUG nova.objects.instance [None req-0d78bf12-3c5c-471b-b2c2-90e73c9dea51 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lazy-loading 'flavor' on Instance uuid 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 832.225740] env[63345]: INFO nova.scheduler.client.report [None req-8479a8c2-85cb-4f55-89bb-1d6962627a93 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Deleted allocations for instance 85fb1ecd-4ca3-401d-a87a-131f0b275506 [ 832.343693] 
env[63345]: DEBUG nova.compute.manager [None req-a4317511-468d-4a93-8b92-7812b3bbd0dc tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] [instance: cb712d80-be78-4c19-a891-329011521f30] Start destroying the instance on the hypervisor. {{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 832.343950] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a4317511-468d-4a93-8b92-7812b3bbd0dc tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] [instance: cb712d80-be78-4c19-a891-329011521f30] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 832.345263] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-764043a3-adcc-49aa-825c-9a84b8983cd4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.359434] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4317511-468d-4a93-8b92-7812b3bbd0dc tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] [instance: cb712d80-be78-4c19-a891-329011521f30] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 832.359772] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9f7fbac0-dcd3-46cf-9284-f214343bd7c9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.367387] env[63345]: DEBUG oslo_vmware.api [None req-a4317511-468d-4a93-8b92-7812b3bbd0dc tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Waiting for the task: (returnval){ [ 832.367387] env[63345]: value = "task-1017193" [ 832.367387] env[63345]: _type = "Task" [ 832.367387] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.377371] env[63345]: DEBUG oslo_vmware.api [None req-a4317511-468d-4a93-8b92-7812b3bbd0dc tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Task: {'id': task-1017193, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.478954] env[63345]: DEBUG nova.network.neutron [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: f37b4a95-0725-4a84-b726-fd4f26e87020] Successfully updated port: a8479a06-71fa-42d6-a093-13fcbbae3778 {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 832.533651] env[63345]: DEBUG oslo_vmware.api [None req-badec31a-3c15-4f7e-95ed-3050b7a3a354 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': task-1017190, 'name': CloneVM_Task, 'duration_secs': 1.502377} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.533968] env[63345]: INFO nova.virt.vmwareapi.vmops [None req-badec31a-3c15-4f7e-95ed-3050b7a3a354 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: df2f06af-54a6-4dbd-83ff-1e4b066acbf3] Created linked-clone VM from snapshot [ 832.534850] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92fee0d8-3237-4154-b3cd-e7a8796e9781 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.544609] env[63345]: DEBUG nova.virt.vmwareapi.images [None req-badec31a-3c15-4f7e-95ed-3050b7a3a354 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: df2f06af-54a6-4dbd-83ff-1e4b066acbf3] Uploading image fd7587f1-3c12-4603-ab6e-b5e1ea2d7b64 {{(pid=63345) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 832.559315] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-badec31a-3c15-4f7e-95ed-3050b7a3a354 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: df2f06af-54a6-4dbd-83ff-1e4b066acbf3] Destroying the VM {{(pid=63345) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 832.560097] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-4be7bee7-f783-4fd1-92f8-5cb435f4b856 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.570054] env[63345]: DEBUG oslo_vmware.api [None req-badec31a-3c15-4f7e-95ed-3050b7a3a354 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Waiting for the task: (returnval){ [ 832.570054] env[63345]: value = "task-1017194" [ 832.570054] env[63345]: _type = "Task" [ 832.570054] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.580252] env[63345]: DEBUG oslo_vmware.api [None req-badec31a-3c15-4f7e-95ed-3050b7a3a354 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': task-1017194, 'name': Destroy_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.599235] env[63345]: DEBUG nova.compute.manager [req-4e87bcbd-c366-4ec4-bcb2-d09c81af2006 req-84591684-ef36-4694-9090-ceb48759d372 service nova] [instance: f37b4a95-0725-4a84-b726-fd4f26e87020] Received event network-vif-plugged-a8479a06-71fa-42d6-a093-13fcbbae3778 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 832.599521] env[63345]: DEBUG oslo_concurrency.lockutils [req-4e87bcbd-c366-4ec4-bcb2-d09c81af2006 req-84591684-ef36-4694-9090-ceb48759d372 service nova] Acquiring lock "f37b4a95-0725-4a84-b726-fd4f26e87020-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 832.599737] env[63345]: DEBUG oslo_concurrency.lockutils [req-4e87bcbd-c366-4ec4-bcb2-d09c81af2006 req-84591684-ef36-4694-9090-ceb48759d372 service nova] Lock "f37b4a95-0725-4a84-b726-fd4f26e87020-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 832.599848] env[63345]: DEBUG oslo_concurrency.lockutils [req-4e87bcbd-c366-4ec4-bcb2-d09c81af2006 req-84591684-ef36-4694-9090-ceb48759d372 service nova] Lock "f37b4a95-0725-4a84-b726-fd4f26e87020-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 832.600022] env[63345]: DEBUG nova.compute.manager [req-4e87bcbd-c366-4ec4-bcb2-d09c81af2006 req-84591684-ef36-4694-9090-ceb48759d372 service nova] [instance: f37b4a95-0725-4a84-b726-fd4f26e87020] No waiting events found dispatching network-vif-plugged-a8479a06-71fa-42d6-a093-13fcbbae3778 {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 832.600227] env[63345]: WARNING nova.compute.manager [req-4e87bcbd-c366-4ec4-bcb2-d09c81af2006 req-84591684-ef36-4694-9090-ceb48759d372 service nova] [instance: f37b4a95-0725-4a84-b726-fd4f26e87020] Received unexpected event network-vif-plugged-a8479a06-71fa-42d6-a093-13fcbbae3778 for instance with vm_state building and task_state spawning. [ 832.680330] env[63345]: DEBUG oslo_vmware.api [None req-324cad41-08d0-4374-8c43-f553c7ae1cfd tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Task: {'id': task-1017192, 'name': ReconfigVM_Task, 'duration_secs': 0.164569} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.680450] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-324cad41-08d0-4374-8c43-f553c7ae1cfd tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: bc9d2e6a-f77a-4a21-90bc-81949cbfce91] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-226058', 'volume_id': 'ae2b09a5-682d-4ef7-9729-fbe017759426', 'name': 'volume-ae2b09a5-682d-4ef7-9729-fbe017759426', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'bc9d2e6a-f77a-4a21-90bc-81949cbfce91', 'attached_at': '', 'detached_at': '', 'volume_id': 'ae2b09a5-682d-4ef7-9729-fbe017759426', 'serial': 'ae2b09a5-682d-4ef7-9729-fbe017759426'} {{(pid=63345) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 832.714020] env[63345]: DEBUG oslo_concurrency.lockutils [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Releasing lock "refresh_cache-dde93fd5-6312-4d91-b041-b7fc84b207d3" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 832.714353] env[63345]: DEBUG nova.compute.manager [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: dde93fd5-6312-4d91-b041-b7fc84b207d3] Instance network_info: |[{"id": "56e0ca56-d9a3-439a-b072-ad4f8da026e8", "address": "fa:16:3e:0e:39:69", "network": {"id": "dc725254-60a8-4edc-aab2-604dfb70677d", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1100061234-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "34efcd7d600f49698c6619be002d838f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b00fe87c-d828-442f-bd09-e9018c468557", "external-id": "nsx-vlan-transportzone-7", "segmentation_id": 7, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap56e0ca56-d9", "ovs_interfaceid": "56e0ca56-d9a3-439a-b072-ad4f8da026e8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 832.715319] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: dde93fd5-6312-4d91-b041-b7fc84b207d3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0e:39:69', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b00fe87c-d828-442f-bd09-e9018c468557', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '56e0ca56-d9a3-439a-b072-ad4f8da026e8', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 832.723626] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None 
req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Creating folder: Project (34efcd7d600f49698c6619be002d838f). Parent ref: group-v225918. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 832.724405] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a234e8fb-7cf3-452f-8e41-6605e8f9cae3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.735049] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8479a8c2-85cb-4f55-89bb-1d6962627a93 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Lock "85fb1ecd-4ca3-401d-a87a-131f0b275506" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.588s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 832.744825] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Created folder: Project (34efcd7d600f49698c6619be002d838f) in parent group-v225918. [ 832.745050] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Creating folder: Instances. Parent ref: group-v226061. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 832.745407] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-42c31944-6708-4f63-95f7-a7946dd3c57c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.758589] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Created folder: Instances in parent group-v226061. [ 832.758850] env[63345]: DEBUG oslo.service.loopingcall [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 832.759255] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dde93fd5-6312-4d91-b041-b7fc84b207d3] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 832.759384] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-05cfd892-43a4-4454-81fa-615e73fbe4cd {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.785387] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 832.785387] env[63345]: value = "task-1017197" [ 832.785387] env[63345]: _type = "Task" [ 832.785387] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.798345] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017197, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.878960] env[63345]: DEBUG oslo_vmware.api [None req-a4317511-468d-4a93-8b92-7812b3bbd0dc tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Task: {'id': task-1017193, 'name': PowerOffVM_Task, 'duration_secs': 0.224219} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.879365] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4317511-468d-4a93-8b92-7812b3bbd0dc tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] [instance: cb712d80-be78-4c19-a891-329011521f30] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 832.879610] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a4317511-468d-4a93-8b92-7812b3bbd0dc tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] [instance: cb712d80-be78-4c19-a891-329011521f30] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 832.879909] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a97f253c-7b4e-4452-85f6-8ec968ae8a0b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.981846] env[63345]: DEBUG oslo_concurrency.lockutils [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquiring lock "refresh_cache-f37b4a95-0725-4a84-b726-fd4f26e87020" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 832.982106] env[63345]: DEBUG oslo_concurrency.lockutils [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquired lock "refresh_cache-f37b4a95-0725-4a84-b726-fd4f26e87020" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 832.982340] env[63345]: DEBUG nova.network.neutron [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: f37b4a95-0725-4a84-b726-fd4f26e87020] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 833.040678] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a4317511-468d-4a93-8b92-7812b3bbd0dc tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] [instance: cb712d80-be78-4c19-a891-329011521f30] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 833.041059] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a4317511-468d-4a93-8b92-7812b3bbd0dc tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] [instance: cb712d80-be78-4c19-a891-329011521f30] Deleting contents of the VM from datastore datastore1 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 833.041298] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4317511-468d-4a93-8b92-7812b3bbd0dc tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Deleting the datastore file [datastore1] cb712d80-be78-4c19-a891-329011521f30 {{(pid=63345) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 833.041632] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-08076834-b16e-493a-8d20-b6eae30e4b38 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.052510] env[63345]: DEBUG oslo_vmware.api [None req-a4317511-468d-4a93-8b92-7812b3bbd0dc tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Waiting for the task: (returnval){ [ 833.052510] env[63345]: value = "task-1017199" [ 833.052510] env[63345]: _type = "Task" [ 833.052510] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.062264] env[63345]: DEBUG oslo_vmware.api [None req-a4317511-468d-4a93-8b92-7812b3bbd0dc tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Task: {'id': task-1017199, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.081346] env[63345]: DEBUG oslo_vmware.api [None req-badec31a-3c15-4f7e-95ed-3050b7a3a354 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': task-1017194, 'name': Destroy_Task} progress is 33%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.130272] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7128046-afe4-4f9f-9423-bb398f5cc1d1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.139334] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d419ece-2875-4584-81c1-67b3c300e029 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.174340] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-098852e5-bb0b-4888-8bac-150d4f4b256c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.180932] env[63345]: DEBUG oslo_concurrency.lockutils [None req-921d1268-d248-42bd-8630-9e88a573b677 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Acquiring lock "40d228ea-881e-4442-a16a-6758d061aa39" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 833.181339] env[63345]: DEBUG oslo_concurrency.lockutils [None req-921d1268-d248-42bd-8630-9e88a573b677 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Lock "40d228ea-881e-4442-a16a-6758d061aa39" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 833.181574] env[63345]: DEBUG oslo_concurrency.lockutils [None req-921d1268-d248-42bd-8630-9e88a573b677 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Acquiring lock "40d228ea-881e-4442-a16a-6758d061aa39-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 833.181849] env[63345]: DEBUG oslo_concurrency.lockutils [None req-921d1268-d248-42bd-8630-9e88a573b677 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Lock "40d228ea-881e-4442-a16a-6758d061aa39-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 833.182077] env[63345]: DEBUG oslo_concurrency.lockutils [None req-921d1268-d248-42bd-8630-9e88a573b677 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Lock "40d228ea-881e-4442-a16a-6758d061aa39-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 833.189079] env[63345]: INFO nova.compute.manager [None req-921d1268-d248-42bd-8630-9e88a573b677 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] [instance: 40d228ea-881e-4442-a16a-6758d061aa39] Terminating instance [ 833.192399] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b0d2af4-229b-4993-b547-8194e9b001c5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.208578] env[63345]: DEBUG nova.compute.provider_tree [None req-128f3b73-bb07-41af-8973-c75285c7dc1a tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 833.236530] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d78bf12-3c5c-471b-b2c2-90e73c9dea51 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 833.237031] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fb0a40ce-756a-4cdb-88ad-d0c101d86a52 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.246629] env[63345]: DEBUG oslo_vmware.api [None req-0d78bf12-3c5c-471b-b2c2-90e73c9dea51 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 833.246629] env[63345]: value = "task-1017200" [ 833.246629] env[63345]: _type = "Task" [ 833.246629] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.261483] env[63345]: DEBUG oslo_vmware.api [None req-0d78bf12-3c5c-471b-b2c2-90e73c9dea51 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017200, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.272324] env[63345]: DEBUG nova.compute.manager [req-c369db26-e437-4100-9e6e-0c239cd6e809 req-fcfffd3c-8e6e-48fc-b0c7-7243af89e082 service nova] [instance: dde93fd5-6312-4d91-b041-b7fc84b207d3] Received event network-changed-56e0ca56-d9a3-439a-b072-ad4f8da026e8 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 833.272466] env[63345]: DEBUG nova.compute.manager [req-c369db26-e437-4100-9e6e-0c239cd6e809 req-fcfffd3c-8e6e-48fc-b0c7-7243af89e082 service nova] [instance: dde93fd5-6312-4d91-b041-b7fc84b207d3] Refreshing instance network info cache due to event network-changed-56e0ca56-d9a3-439a-b072-ad4f8da026e8. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 833.272683] env[63345]: DEBUG oslo_concurrency.lockutils [req-c369db26-e437-4100-9e6e-0c239cd6e809 req-fcfffd3c-8e6e-48fc-b0c7-7243af89e082 service nova] Acquiring lock "refresh_cache-dde93fd5-6312-4d91-b041-b7fc84b207d3" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 833.272831] env[63345]: DEBUG oslo_concurrency.lockutils [req-c369db26-e437-4100-9e6e-0c239cd6e809 req-fcfffd3c-8e6e-48fc-b0c7-7243af89e082 service nova] Acquired lock "refresh_cache-dde93fd5-6312-4d91-b041-b7fc84b207d3" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 833.272998] env[63345]: DEBUG nova.network.neutron [req-c369db26-e437-4100-9e6e-0c239cd6e809 req-fcfffd3c-8e6e-48fc-b0c7-7243af89e082 service nova] [instance: dde93fd5-6312-4d91-b041-b7fc84b207d3] Refreshing network info cache for port 56e0ca56-d9a3-439a-b072-ad4f8da026e8 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 833.298597] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017197, 'name': CreateVM_Task, 'duration_secs': 0.478654} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.298775] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dde93fd5-6312-4d91-b041-b7fc84b207d3] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 833.299742] env[63345]: DEBUG oslo_concurrency.lockutils [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 833.299926] env[63345]: DEBUG oslo_concurrency.lockutils [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 833.300314] env[63345]: DEBUG oslo_concurrency.lockutils [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 833.301308] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-52879783-ea17-4312-b865-ec60aa2015dc {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.307078] env[63345]: DEBUG oslo_vmware.api [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Waiting for the task: (returnval){ [ 833.307078] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52bc024a-a9fa-339f-8cf2-889c0a977f1b" [ 833.307078] env[63345]: _type = "Task" [ 833.307078] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.316821] env[63345]: DEBUG oslo_vmware.api [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52bc024a-a9fa-339f-8cf2-889c0a977f1b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.521051] env[63345]: DEBUG nova.network.neutron [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: f37b4a95-0725-4a84-b726-fd4f26e87020] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 833.563501] env[63345]: DEBUG oslo_vmware.api [None req-a4317511-468d-4a93-8b92-7812b3bbd0dc tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Task: {'id': task-1017199, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.16574} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.563727] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4317511-468d-4a93-8b92-7812b3bbd0dc tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 833.563853] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a4317511-468d-4a93-8b92-7812b3bbd0dc tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] [instance: cb712d80-be78-4c19-a891-329011521f30] Deleted contents of the VM from datastore datastore1 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 833.564041] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a4317511-468d-4a93-8b92-7812b3bbd0dc tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] [instance: cb712d80-be78-4c19-a891-329011521f30] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 833.564239] env[63345]: INFO nova.compute.manager [None req-a4317511-468d-4a93-8b92-7812b3bbd0dc tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] [instance: cb712d80-be78-4c19-a891-329011521f30] Took 1.22 seconds to destroy the instance on the hypervisor. [ 833.564497] env[63345]: DEBUG oslo.service.loopingcall [None req-a4317511-468d-4a93-8b92-7812b3bbd0dc tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 833.564706] env[63345]: DEBUG nova.compute.manager [-] [instance: cb712d80-be78-4c19-a891-329011521f30] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 833.564807] env[63345]: DEBUG nova.network.neutron [-] [instance: cb712d80-be78-4c19-a891-329011521f30] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 833.580571] env[63345]: DEBUG oslo_vmware.api [None req-badec31a-3c15-4f7e-95ed-3050b7a3a354 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': task-1017194, 'name': Destroy_Task, 'duration_secs': 0.549985} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.580871] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-badec31a-3c15-4f7e-95ed-3050b7a3a354 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: df2f06af-54a6-4dbd-83ff-1e4b066acbf3] Destroyed the VM [ 833.581474] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-badec31a-3c15-4f7e-95ed-3050b7a3a354 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: df2f06af-54a6-4dbd-83ff-1e4b066acbf3] Deleting Snapshot of the VM instance {{(pid=63345) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 833.581648] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-bdfcc85b-ada8-46a3-9ba8-6132dc3fd4b0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.593952] env[63345]: DEBUG oslo_vmware.api [None req-badec31a-3c15-4f7e-95ed-3050b7a3a354 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Waiting for the task: (returnval){ [ 833.593952] env[63345]: value = "task-1017201" [ 833.593952] env[63345]: _type = "Task" [ 833.593952] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.608847] env[63345]: DEBUG oslo_vmware.api [None req-badec31a-3c15-4f7e-95ed-3050b7a3a354 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': task-1017201, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.699454] env[63345]: DEBUG nova.compute.manager [None req-921d1268-d248-42bd-8630-9e88a573b677 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] [instance: 40d228ea-881e-4442-a16a-6758d061aa39] Start destroying the instance on the hypervisor. 
{{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 833.699725] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-921d1268-d248-42bd-8630-9e88a573b677 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] [instance: 40d228ea-881e-4442-a16a-6758d061aa39] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 833.700720] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93a3f3b5-31e4-4028-b5ab-b47016fd0988 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.709787] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-921d1268-d248-42bd-8630-9e88a573b677 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] [instance: 40d228ea-881e-4442-a16a-6758d061aa39] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 833.710058] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-89586d89-0eac-4d73-aa0c-d47aef46df6c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.712677] env[63345]: DEBUG nova.scheduler.client.report [None req-128f3b73-bb07-41af-8973-c75285c7dc1a tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 833.723440] env[63345]: DEBUG oslo_vmware.api [None req-921d1268-d248-42bd-8630-9e88a573b677 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Waiting for the task: (returnval){ [ 833.723440] env[63345]: value = "task-1017202" [ 833.723440] env[63345]: _type = "Task" [ 833.723440] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.741610] env[63345]: DEBUG nova.objects.instance [None req-324cad41-08d0-4374-8c43-f553c7ae1cfd tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Lazy-loading 'flavor' on Instance uuid bc9d2e6a-f77a-4a21-90bc-81949cbfce91 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 833.748146] env[63345]: DEBUG oslo_vmware.api [None req-921d1268-d248-42bd-8630-9e88a573b677 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Task: {'id': task-1017202, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.750396] env[63345]: DEBUG nova.network.neutron [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: f37b4a95-0725-4a84-b726-fd4f26e87020] Updating instance_info_cache with network_info: [{"id": "a8479a06-71fa-42d6-a093-13fcbbae3778", "address": "fa:16:3e:63:55:2c", "network": {"id": "f05df594-fc76-4e2d-b29b-6942fee8dc99", "bridge": "br-int", "label": "tempest-ServersTestJSON-241206779-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63d7b3facae6416989f763e610cf98f7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa8479a06-71", "ovs_interfaceid": "a8479a06-71fa-42d6-a093-13fcbbae3778", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 833.766652] env[63345]: DEBUG oslo_vmware.api [None req-0d78bf12-3c5c-471b-b2c2-90e73c9dea51 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017200, 'name': PowerOffVM_Task, 'duration_secs': 0.346638} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.767213] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d78bf12-3c5c-471b-b2c2-90e73c9dea51 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 833.767415] env[63345]: DEBUG nova.compute.manager [None req-0d78bf12-3c5c-471b-b2c2-90e73c9dea51 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 833.768507] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f6677a3-ddd0-4bd6-8cf2-d8deef481669 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.819857] env[63345]: DEBUG oslo_vmware.api [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52bc024a-a9fa-339f-8cf2-889c0a977f1b, 'name': SearchDatastore_Task, 'duration_secs': 0.011032} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.820199] env[63345]: DEBUG oslo_concurrency.lockutils [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 833.820454] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: dde93fd5-6312-4d91-b041-b7fc84b207d3] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 833.820680] env[63345]: DEBUG oslo_concurrency.lockutils [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 833.820966] env[63345]: DEBUG oslo_concurrency.lockutils [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 833.821757] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 833.821757] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ed63a114-ed8b-4c6f-982c-f793b9753800 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.834034] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 833.834034] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 833.834034] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-67389d24-e59d-4183-857d-8cff76c18416 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.842322] env[63345]: DEBUG oslo_vmware.api [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Waiting for the task: (returnval){ [ 833.842322] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52b2e4a0-3879-4c6e-81c1-a693da4a7384" [ 833.842322] env[63345]: _type = "Task" [ 833.842322] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.852809] env[63345]: DEBUG oslo_vmware.api [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52b2e4a0-3879-4c6e-81c1-a693da4a7384, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.100921] env[63345]: DEBUG nova.network.neutron [req-c369db26-e437-4100-9e6e-0c239cd6e809 req-fcfffd3c-8e6e-48fc-b0c7-7243af89e082 service nova] [instance: dde93fd5-6312-4d91-b041-b7fc84b207d3] Updated VIF entry in instance network info cache for port 56e0ca56-d9a3-439a-b072-ad4f8da026e8. {{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 834.101402] env[63345]: DEBUG nova.network.neutron [req-c369db26-e437-4100-9e6e-0c239cd6e809 req-fcfffd3c-8e6e-48fc-b0c7-7243af89e082 service nova] [instance: dde93fd5-6312-4d91-b041-b7fc84b207d3] Updating instance_info_cache with network_info: [{"id": "56e0ca56-d9a3-439a-b072-ad4f8da026e8", "address": "fa:16:3e:0e:39:69", "network": {"id": "dc725254-60a8-4edc-aab2-604dfb70677d", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1100061234-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "34efcd7d600f49698c6619be002d838f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b00fe87c-d828-442f-bd09-e9018c468557", "external-id": "nsx-vlan-transportzone-7", "segmentation_id": 7, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap56e0ca56-d9", "ovs_interfaceid": "56e0ca56-d9a3-439a-b072-ad4f8da026e8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 834.110674] env[63345]: DEBUG oslo_vmware.api [None req-badec31a-3c15-4f7e-95ed-3050b7a3a354 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': task-1017201, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.218661] env[63345]: DEBUG oslo_concurrency.lockutils [None req-128f3b73-bb07-41af-8973-c75285c7dc1a tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.031s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 834.221688] env[63345]: DEBUG oslo_concurrency.lockutils [None req-50b04232-926d-4677-9129-41a8dd83ce1b tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.988s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 834.221688] env[63345]: DEBUG nova.objects.instance [None req-50b04232-926d-4677-9129-41a8dd83ce1b tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Lazy-loading 'resources' on Instance uuid 00c58889-75f7-4a4b-a5a3-a45723c1f495 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 834.234149] env[63345]: DEBUG oslo_vmware.api [None req-921d1268-d248-42bd-8630-9e88a573b677 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Task: {'id': task-1017202, 'name': PowerOffVM_Task, 'duration_secs': 0.34944} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.234319] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-921d1268-d248-42bd-8630-9e88a573b677 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] [instance: 40d228ea-881e-4442-a16a-6758d061aa39] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 834.234508] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-921d1268-d248-42bd-8630-9e88a573b677 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] [instance: 40d228ea-881e-4442-a16a-6758d061aa39] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 834.235214] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fa522ad0-4e5c-4eb1-b664-df67336c61a4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.244320] env[63345]: INFO nova.scheduler.client.report [None req-128f3b73-bb07-41af-8973-c75285c7dc1a tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Deleted allocations for instance fe3e2b2a-9583-482e-b69b-6c130801d7db [ 834.255082] env[63345]: DEBUG oslo_concurrency.lockutils [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Releasing lock "refresh_cache-f37b4a95-0725-4a84-b726-fd4f26e87020" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 834.255383] env[63345]: DEBUG nova.compute.manager [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 
f37b4a95-0725-4a84-b726-fd4f26e87020] Instance network_info: |[{"id": "a8479a06-71fa-42d6-a093-13fcbbae3778", "address": "fa:16:3e:63:55:2c", "network": {"id": "f05df594-fc76-4e2d-b29b-6942fee8dc99", "bridge": "br-int", "label": "tempest-ServersTestJSON-241206779-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63d7b3facae6416989f763e610cf98f7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa8479a06-71", "ovs_interfaceid": "a8479a06-71fa-42d6-a093-13fcbbae3778", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 834.257488] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: f37b4a95-0725-4a84-b726-fd4f26e87020] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:63:55:2c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7874ee7f-20c7-4bd8-a750-ed489e9acc65', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a8479a06-71fa-42d6-a093-13fcbbae3778', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 834.274391] env[63345]: DEBUG oslo.service.loopingcall [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 834.275242] env[63345]: DEBUG oslo_concurrency.lockutils [None req-324cad41-08d0-4374-8c43-f553c7ae1cfd tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Lock "bc9d2e6a-f77a-4a21-90bc-81949cbfce91" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.368s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 834.277064] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f37b4a95-0725-4a84-b726-fd4f26e87020] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 834.277064] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f7f15b49-f1d0-42e8-8cee-564fb2e6b04b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.310861] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0d78bf12-3c5c-471b-b2c2-90e73c9dea51 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lock "691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.099s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 834.318175] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 834.318175] env[63345]: value = "task-1017204" [ 834.318175] env[63345]: _type = "Task" [ 834.318175] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.320155] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-921d1268-d248-42bd-8630-9e88a573b677 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] [instance: 40d228ea-881e-4442-a16a-6758d061aa39] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 834.320459] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-921d1268-d248-42bd-8630-9e88a573b677 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] [instance: 40d228ea-881e-4442-a16a-6758d061aa39] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 834.320672] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-921d1268-d248-42bd-8630-9e88a573b677 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Deleting the datastore file [datastore2] 40d228ea-881e-4442-a16a-6758d061aa39 {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 834.324760] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-89563c5e-75cc-4248-84bc-50bb35431ab4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.335460] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017204, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.337436] env[63345]: DEBUG oslo_vmware.api [None req-921d1268-d248-42bd-8630-9e88a573b677 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Waiting for the task: (returnval){ [ 834.337436] env[63345]: value = "task-1017205" [ 834.337436] env[63345]: _type = "Task" [ 834.337436] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.354607] env[63345]: DEBUG oslo_vmware.api [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52b2e4a0-3879-4c6e-81c1-a693da4a7384, 'name': SearchDatastore_Task, 'duration_secs': 0.011566} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.360534] env[63345]: DEBUG oslo_vmware.api [None req-921d1268-d248-42bd-8630-9e88a573b677 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Task: {'id': task-1017205, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.360824] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-09487d14-372b-44cc-96a3-73808b15e723 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.368673] env[63345]: DEBUG oslo_vmware.api [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Waiting for the task: (returnval){ [ 834.368673] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52b3ab1d-fa51-4c11-baae-826256e8e4d0" [ 834.368673] env[63345]: _type = "Task" [ 834.368673] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.387375] env[63345]: DEBUG oslo_vmware.api [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52b3ab1d-fa51-4c11-baae-826256e8e4d0, 'name': SearchDatastore_Task, 'duration_secs': 0.013539} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.388125] env[63345]: DEBUG oslo_concurrency.lockutils [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 834.388258] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore1] dde93fd5-6312-4d91-b041-b7fc84b207d3/dde93fd5-6312-4d91-b041-b7fc84b207d3.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 834.388496] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-47b323ec-2c67-4407-9b85-7c370709b74b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.399704] env[63345]: DEBUG oslo_vmware.api [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Waiting for the task: (returnval){ [ 834.399704] env[63345]: value = "task-1017206" [ 834.399704] env[63345]: _type = "Task" [ 834.399704] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.412030] env[63345]: DEBUG oslo_vmware.api [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': task-1017206, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.607426] env[63345]: DEBUG oslo_concurrency.lockutils [req-c369db26-e437-4100-9e6e-0c239cd6e809 req-fcfffd3c-8e6e-48fc-b0c7-7243af89e082 service nova] Releasing lock "refresh_cache-dde93fd5-6312-4d91-b041-b7fc84b207d3" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 834.609948] env[63345]: DEBUG oslo_vmware.api [None req-badec31a-3c15-4f7e-95ed-3050b7a3a354 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': task-1017201, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.756996] env[63345]: DEBUG oslo_concurrency.lockutils [None req-128f3b73-bb07-41af-8973-c75285c7dc1a tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Lock "fe3e2b2a-9583-482e-b69b-6c130801d7db" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.702s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 834.835146] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017204, 'name': CreateVM_Task, 'duration_secs': 0.432591} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.835146] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f37b4a95-0725-4a84-b726-fd4f26e87020] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 834.835784] env[63345]: DEBUG oslo_concurrency.lockutils [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 834.836265] env[63345]: DEBUG oslo_concurrency.lockutils [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 834.836355] env[63345]: DEBUG oslo_concurrency.lockutils [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 834.836623] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0c53f894-ac51-46a8-ac9b-c9d17ffd6e21 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.847944] env[63345]: DEBUG nova.network.neutron [-] [instance: cb712d80-be78-4c19-a891-329011521f30] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 834.850084] env[63345]: DEBUG oslo_vmware.api [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Waiting for the task: (returnval){ [ 834.850084] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52d64bf2-ae41-800d-8790-ce8e464aab6a" [ 834.850084] env[63345]: _type = "Task" [ 834.850084] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.860874] env[63345]: DEBUG oslo_vmware.api [None req-921d1268-d248-42bd-8630-9e88a573b677 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Task: {'id': task-1017205, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.206679} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.862074] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-921d1268-d248-42bd-8630-9e88a573b677 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 834.862074] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-921d1268-d248-42bd-8630-9e88a573b677 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] [instance: 40d228ea-881e-4442-a16a-6758d061aa39] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 834.862262] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-921d1268-d248-42bd-8630-9e88a573b677 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] [instance: 40d228ea-881e-4442-a16a-6758d061aa39] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 834.862476] env[63345]: INFO nova.compute.manager [None req-921d1268-d248-42bd-8630-9e88a573b677 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] [instance: 40d228ea-881e-4442-a16a-6758d061aa39] Took 1.16 seconds to destroy the instance on the hypervisor. [ 834.863394] env[63345]: DEBUG oslo.service.loopingcall [None req-921d1268-d248-42bd-8630-9e88a573b677 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 834.863596] env[63345]: DEBUG nova.compute.manager [-] [instance: 40d228ea-881e-4442-a16a-6758d061aa39] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 834.863596] env[63345]: DEBUG nova.network.neutron [-] [instance: 40d228ea-881e-4442-a16a-6758d061aa39] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 834.874457] env[63345]: DEBUG oslo_vmware.api [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52d64bf2-ae41-800d-8790-ce8e464aab6a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.877944] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5d1264cb-55a4-4bb7-963b-f6a8e2dc83c5 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Acquiring lock "bc9d2e6a-f77a-4a21-90bc-81949cbfce91" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 834.877944] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5d1264cb-55a4-4bb7-963b-f6a8e2dc83c5 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Lock "bc9d2e6a-f77a-4a21-90bc-81949cbfce91" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 834.916689] env[63345]: DEBUG oslo_vmware.api [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': task-1017206, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.005817] env[63345]: DEBUG nova.compute.manager [req-e617af47-afca-45b6-8b44-1413e8ccb1b7 req-27e90ba1-f00b-4dd3-a03b-befa5aedfe26 service nova] [instance: f37b4a95-0725-4a84-b726-fd4f26e87020] Received event network-changed-a8479a06-71fa-42d6-a093-13fcbbae3778 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 835.006070] env[63345]: DEBUG nova.compute.manager [req-e617af47-afca-45b6-8b44-1413e8ccb1b7 req-27e90ba1-f00b-4dd3-a03b-befa5aedfe26 service nova] [instance: f37b4a95-0725-4a84-b726-fd4f26e87020] Refreshing instance network info cache due to event network-changed-a8479a06-71fa-42d6-a093-13fcbbae3778. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 835.006317] env[63345]: DEBUG oslo_concurrency.lockutils [req-e617af47-afca-45b6-8b44-1413e8ccb1b7 req-27e90ba1-f00b-4dd3-a03b-befa5aedfe26 service nova] Acquiring lock "refresh_cache-f37b4a95-0725-4a84-b726-fd4f26e87020" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 835.007029] env[63345]: DEBUG oslo_concurrency.lockutils [req-e617af47-afca-45b6-8b44-1413e8ccb1b7 req-27e90ba1-f00b-4dd3-a03b-befa5aedfe26 service nova] Acquired lock "refresh_cache-f37b4a95-0725-4a84-b726-fd4f26e87020" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 835.007029] env[63345]: DEBUG nova.network.neutron [req-e617af47-afca-45b6-8b44-1413e8ccb1b7 req-27e90ba1-f00b-4dd3-a03b-befa5aedfe26 service nova] [instance: f37b4a95-0725-4a84-b726-fd4f26e87020] Refreshing network info cache for port a8479a06-71fa-42d6-a093-13fcbbae3778 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 835.110381] env[63345]: DEBUG oslo_vmware.api [None req-badec31a-3c15-4f7e-95ed-3050b7a3a354 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': task-1017201, 'name': RemoveSnapshot_Task, 'duration_secs': 1.231994} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.115668] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-badec31a-3c15-4f7e-95ed-3050b7a3a354 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: df2f06af-54a6-4dbd-83ff-1e4b066acbf3] Deleted Snapshot of the VM instance {{(pid=63345) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 835.198260] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9c0397a-1064-4050-98bf-7b4609def5c8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.208064] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75a10516-0e71-445d-824f-0a0013703b3c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.250156] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d097144-69f3-421d-bc72-62022c40d55c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.258844] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9549c21e-cf91-441e-a52e-38507a304db5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.277726] env[63345]: DEBUG nova.compute.provider_tree [None req-50b04232-926d-4677-9129-41a8dd83ce1b tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 835.362027] env[63345]: INFO nova.compute.manager [-] [instance: cb712d80-be78-4c19-a891-329011521f30] Took 1.80 seconds to deallocate network for instance. [ 835.362371] env[63345]: DEBUG oslo_vmware.api [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52d64bf2-ae41-800d-8790-ce8e464aab6a, 'name': SearchDatastore_Task, 'duration_secs': 0.057708} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.364155] env[63345]: DEBUG oslo_concurrency.lockutils [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 835.364392] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: f37b4a95-0725-4a84-b726-fd4f26e87020] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 835.364622] env[63345]: DEBUG oslo_concurrency.lockutils [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 835.364769] env[63345]: DEBUG oslo_concurrency.lockutils [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 835.364946] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 835.367711] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e89717e6-2b8e-458d-820a-e47d853e216a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.381890] env[63345]: INFO nova.compute.manager [None req-5d1264cb-55a4-4bb7-963b-f6a8e2dc83c5 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: bc9d2e6a-f77a-4a21-90bc-81949cbfce91] Detaching volume ae2b09a5-682d-4ef7-9729-fbe017759426 [ 835.383734] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 835.384228] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 835.385282] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-311d46fe-e66e-423d-9ad1-b723edbde92a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.394566] env[63345]: DEBUG oslo_vmware.api [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Waiting for the task: (returnval){ [ 835.394566] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52150ec9-d8f3-3a1f-9015-068554b73d2f" [ 835.394566] env[63345]: _type = "Task" [ 835.394566] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.406250] env[63345]: DEBUG oslo_vmware.api [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52150ec9-d8f3-3a1f-9015-068554b73d2f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.414432] env[63345]: DEBUG oslo_vmware.api [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': task-1017206, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.570411} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.414963] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore1] dde93fd5-6312-4d91-b041-b7fc84b207d3/dde93fd5-6312-4d91-b041-b7fc84b207d3.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 835.415199] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: dde93fd5-6312-4d91-b041-b7fc84b207d3] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 835.415457] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-82ca6662-dab7-43be-8e2a-674d2697bc44 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.425893] env[63345]: DEBUG oslo_vmware.api [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Waiting for the task: (returnval){ [ 835.425893] env[63345]: value = "task-1017207" [ 835.425893] env[63345]: _type = "Task" [ 835.425893] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.430135] env[63345]: INFO nova.virt.block_device [None req-5d1264cb-55a4-4bb7-963b-f6a8e2dc83c5 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: bc9d2e6a-f77a-4a21-90bc-81949cbfce91] Attempting to driver detach volume ae2b09a5-682d-4ef7-9729-fbe017759426 from mountpoint /dev/sdb [ 835.430369] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-5d1264cb-55a4-4bb7-963b-f6a8e2dc83c5 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: bc9d2e6a-f77a-4a21-90bc-81949cbfce91] Volume detach. Driver type: vmdk {{(pid=63345) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 835.430562] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-5d1264cb-55a4-4bb7-963b-f6a8e2dc83c5 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: bc9d2e6a-f77a-4a21-90bc-81949cbfce91] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-226058', 'volume_id': 'ae2b09a5-682d-4ef7-9729-fbe017759426', 'name': 'volume-ae2b09a5-682d-4ef7-9729-fbe017759426', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'bc9d2e6a-f77a-4a21-90bc-81949cbfce91', 'attached_at': '', 'detached_at': '', 'volume_id': 'ae2b09a5-682d-4ef7-9729-fbe017759426', 'serial': 'ae2b09a5-682d-4ef7-9729-fbe017759426'} {{(pid=63345) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 835.431382] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9afbf2d-0ef4-4819-8363-e5e6cde52afa {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.440191] env[63345]: DEBUG oslo_vmware.api [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': task-1017207, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.458719] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcd47854-c497-480b-be22-937329e01a6c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.465749] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03b3e8ad-07dd-4e73-af1e-573c85ee68fa {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.486903] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4617b2d6-2431-4089-b885-fccef070d293 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.505144] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-5d1264cb-55a4-4bb7-963b-f6a8e2dc83c5 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] The volume has not been displaced from its original location: [datastore2] volume-ae2b09a5-682d-4ef7-9729-fbe017759426/volume-ae2b09a5-682d-4ef7-9729-fbe017759426.vmdk. No consolidation needed. 
{{(pid=63345) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 835.510780] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-5d1264cb-55a4-4bb7-963b-f6a8e2dc83c5 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: bc9d2e6a-f77a-4a21-90bc-81949cbfce91] Reconfiguring VM instance instance-0000001c to detach disk 2001 {{(pid=63345) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 835.510780] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-07f3fff9-16f5-4285-a288-60ee2f419d2f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.533121] env[63345]: DEBUG oslo_vmware.api [None req-5d1264cb-55a4-4bb7-963b-f6a8e2dc83c5 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Waiting for the task: (returnval){ [ 835.533121] env[63345]: value = "task-1017208" [ 835.533121] env[63345]: _type = "Task" [ 835.533121] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.541687] env[63345]: DEBUG oslo_vmware.api [None req-5d1264cb-55a4-4bb7-963b-f6a8e2dc83c5 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Task: {'id': task-1017208, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.618703] env[63345]: WARNING nova.compute.manager [None req-badec31a-3c15-4f7e-95ed-3050b7a3a354 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: df2f06af-54a6-4dbd-83ff-1e4b066acbf3] Image not found during snapshot: nova.exception.ImageNotFound: Image fd7587f1-3c12-4603-ab6e-b5e1ea2d7b64 could not be found. 
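[editor's note] The block above repeatedly shows the oslo.vmware task pattern: a VirtualDiskManager/VirtualMachine method is invoked, the API returns a task handle, and the caller sits in wait_for_task while _poll_task logs "progress is N%" until the task "completed successfully". The stdlib-only sketch below illustrates that poll-until-done shape; it is not oslo.vmware's implementation, and the task-info dict layout is an assumption made for the example.

# Editor's sketch (not oslo.vmware source): the generic shape of the
# "Waiting for the task ... to complete" / "progress is N%" polling seen
# in the wait_for_task/_poll_task lines above. Stdlib only.
import time


class TaskFailed(Exception):
    """Raised when the polled task reports an error state or times out."""


def wait_for_task(get_task_info, poll_interval=0.5, timeout=300.0):
    """Poll get_task_info() until the task succeeds, fails, or times out.

    get_task_info() is assumed to return a dict with a 'state' key in
    {'queued', 'running', 'success', 'error'} plus optional 'progress',
    'result' and 'error' fields (illustrative layout, not the vSphere one).
    """
    deadline = time.monotonic() + timeout
    while True:
        info = get_task_info()
        state = info.get("state")
        if state == "success":
            return info.get("result")
        if state == "error":
            raise TaskFailed(info.get("error", "task failed"))
        if time.monotonic() >= deadline:
            raise TaskFailed("timed out waiting for task")
        # Mirrors the periodic "progress is N%" debug lines in the log.
        print("progress is %s%%" % info.get("progress", 0))
        time.sleep(poll_interval)


if __name__ == "__main__":
    # Toy task that "completes" on the third poll.
    states = iter([{"state": "running", "progress": 0},
                   {"state": "running", "progress": 77},
                   {"state": "success", "result": "task-1017207"}])
    print(wait_for_task(lambda: next(states), poll_interval=0.01))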
[ 835.704995] env[63345]: DEBUG nova.compute.manager [req-797f7874-e178-4a69-9d6f-5e64d8d4d2de req-e82bf864-a2ff-48fe-9db5-145af15164c0 service nova] [instance: cb712d80-be78-4c19-a891-329011521f30] Received event network-vif-deleted-253ec25c-f8ef-41b2-a789-844509636178 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 835.705341] env[63345]: DEBUG nova.compute.manager [req-797f7874-e178-4a69-9d6f-5e64d8d4d2de req-e82bf864-a2ff-48fe-9db5-145af15164c0 service nova] [instance: 40d228ea-881e-4442-a16a-6758d061aa39] Received event network-vif-deleted-d58bb0b4-4fd1-4361-b242-9e186e278f08 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 835.705587] env[63345]: INFO nova.compute.manager [req-797f7874-e178-4a69-9d6f-5e64d8d4d2de req-e82bf864-a2ff-48fe-9db5-145af15164c0 service nova] [instance: 40d228ea-881e-4442-a16a-6758d061aa39] Neutron deleted interface d58bb0b4-4fd1-4361-b242-9e186e278f08; detaching it from the instance and deleting it from the info cache [ 835.705827] env[63345]: DEBUG nova.network.neutron [req-797f7874-e178-4a69-9d6f-5e64d8d4d2de req-e82bf864-a2ff-48fe-9db5-145af15164c0 service nova] [instance: 40d228ea-881e-4442-a16a-6758d061aa39] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 835.765438] env[63345]: DEBUG nova.network.neutron [-] [instance: 40d228ea-881e-4442-a16a-6758d061aa39] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 835.780571] env[63345]: DEBUG nova.scheduler.client.report [None req-50b04232-926d-4677-9129-41a8dd83ce1b tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 835.874999] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a4317511-468d-4a93-8b92-7812b3bbd0dc tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 835.886082] env[63345]: INFO nova.compute.manager [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Rebuilding instance [ 835.908825] env[63345]: DEBUG oslo_vmware.api [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52150ec9-d8f3-3a1f-9015-068554b73d2f, 'name': SearchDatastore_Task, 'duration_secs': 0.015323} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.913819] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7e8ae9ee-c4a6-403c-a8ff-40b509a7af2a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.919384] env[63345]: DEBUG oslo_vmware.api [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Waiting for the task: (returnval){ [ 835.919384] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]5294fc5b-af69-ed4e-198b-dfbb6724a620" [ 835.919384] env[63345]: _type = "Task" [ 835.919384] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.928860] env[63345]: DEBUG oslo_vmware.api [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5294fc5b-af69-ed4e-198b-dfbb6724a620, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.937429] env[63345]: DEBUG nova.compute.manager [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 835.938261] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe8a8c0d-01f2-40eb-8822-c3eb65eed9bc {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.944340] env[63345]: DEBUG oslo_vmware.api [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': task-1017207, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.107106} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.944970] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: dde93fd5-6312-4d91-b041-b7fc84b207d3] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 835.946012] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce746513-ff2a-460d-8e58-51b37aa84386 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.954036] env[63345]: DEBUG nova.network.neutron [req-e617af47-afca-45b6-8b44-1413e8ccb1b7 req-27e90ba1-f00b-4dd3-a03b-befa5aedfe26 service nova] [instance: f37b4a95-0725-4a84-b726-fd4f26e87020] Updated VIF entry in instance network info cache for port a8479a06-71fa-42d6-a093-13fcbbae3778. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 835.954411] env[63345]: DEBUG nova.network.neutron [req-e617af47-afca-45b6-8b44-1413e8ccb1b7 req-27e90ba1-f00b-4dd3-a03b-befa5aedfe26 service nova] [instance: f37b4a95-0725-4a84-b726-fd4f26e87020] Updating instance_info_cache with network_info: [{"id": "a8479a06-71fa-42d6-a093-13fcbbae3778", "address": "fa:16:3e:63:55:2c", "network": {"id": "f05df594-fc76-4e2d-b29b-6942fee8dc99", "bridge": "br-int", "label": "tempest-ServersTestJSON-241206779-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63d7b3facae6416989f763e610cf98f7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa8479a06-71", "ovs_interfaceid": "a8479a06-71fa-42d6-a093-13fcbbae3778", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 835.975425] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: dde93fd5-6312-4d91-b041-b7fc84b207d3] Reconfiguring VM instance instance-00000048 to attach disk [datastore1] dde93fd5-6312-4d91-b041-b7fc84b207d3/dde93fd5-6312-4d91-b041-b7fc84b207d3.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 835.976305] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-98e006d8-7f28-4c71-9d80-c35e4193e8fd {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.997709] env[63345]: DEBUG oslo_vmware.api [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Waiting for the task: (returnval){ [ 835.997709] env[63345]: value = "task-1017209" [ 835.997709] env[63345]: _type = "Task" [ 835.997709] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.008526] env[63345]: DEBUG oslo_vmware.api [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': task-1017209, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.042882] env[63345]: DEBUG oslo_vmware.api [None req-5d1264cb-55a4-4bb7-963b-f6a8e2dc83c5 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Task: {'id': task-1017208, 'name': ReconfigVM_Task, 'duration_secs': 0.287399} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.043231] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-5d1264cb-55a4-4bb7-963b-f6a8e2dc83c5 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: bc9d2e6a-f77a-4a21-90bc-81949cbfce91] Reconfigured VM instance instance-0000001c to detach disk 2001 {{(pid=63345) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 836.047782] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b449d687-a1ba-4e0b-87a7-5bfa5b369cb3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.063860] env[63345]: DEBUG oslo_vmware.api [None req-5d1264cb-55a4-4bb7-963b-f6a8e2dc83c5 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Waiting for the task: (returnval){ [ 836.063860] env[63345]: value = "task-1017210" [ 836.063860] env[63345]: _type = "Task" [ 836.063860] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.072625] env[63345]: DEBUG oslo_vmware.api [None req-5d1264cb-55a4-4bb7-963b-f6a8e2dc83c5 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Task: {'id': task-1017210, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.208594] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a304f993-24c3-4a0b-b759-af6beae82238 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.222516] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb730166-f128-4d70-828e-9771cd2b463e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.260405] env[63345]: DEBUG nova.compute.manager [req-797f7874-e178-4a69-9d6f-5e64d8d4d2de req-e82bf864-a2ff-48fe-9db5-145af15164c0 service nova] [instance: 40d228ea-881e-4442-a16a-6758d061aa39] Detach interface failed, port_id=d58bb0b4-4fd1-4361-b242-9e186e278f08, reason: Instance 40d228ea-881e-4442-a16a-6758d061aa39 could not be found. {{(pid=63345) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 836.267645] env[63345]: INFO nova.compute.manager [-] [instance: 40d228ea-881e-4442-a16a-6758d061aa39] Took 1.40 seconds to deallocate network for instance. 
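[editor's note] The scheduler report client entries above (and again later in this excerpt) log the provider inventory as per-resource-class records with total, reserved, allocation_ratio, min/max_unit and step_size. The short worked example below, with the numbers copied verbatim from the log, shows how such a record translates into consumable capacity using the usual Placement formula (total - reserved) * allocation_ratio; this reproduces the documented Placement calculation for illustration and is not Nova code.

# Editor's sketch: consumable capacity implied by the inventory record
# logged by the scheduler report client above. Values are copied from the
# log; the formula is the standard Placement capacity calculation.
inventory = {
    "VCPU": {"total": 48, "reserved": 0, "min_unit": 1, "max_unit": 16,
             "step_size": 1, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "min_unit": 1,
                  "max_unit": 65530, "step_size": 1, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 400, "reserved": 0, "min_unit": 1, "max_unit": 186,
                "step_size": 1, "allocation_ratio": 1.0},
}


def capacity(record):
    """Units available for allocation for one resource class."""
    return int((record["total"] - record["reserved"]) * record["allocation_ratio"])


for rc, record in inventory.items():
    # e.g. VCPU: (48 - 0) * 4.0 = 192 allocatable units, capped at
    # max_unit per single allocation, in steps of step_size.
    print(f"{rc}: {capacity(record)} allocatable "
          f"(max {record['max_unit']} per allocation)")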
[ 836.287970] env[63345]: DEBUG oslo_concurrency.lockutils [None req-50b04232-926d-4677-9129-41a8dd83ce1b tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.067s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 836.290223] env[63345]: DEBUG oslo_concurrency.lockutils [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.478s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 836.291817] env[63345]: INFO nova.compute.claims [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] [instance: a3f34e0e-2969-406f-a086-a925549e458e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 836.313822] env[63345]: INFO nova.scheduler.client.report [None req-50b04232-926d-4677-9129-41a8dd83ce1b tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Deleted allocations for instance 00c58889-75f7-4a4b-a5a3-a45723c1f495 [ 836.431100] env[63345]: DEBUG oslo_vmware.api [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5294fc5b-af69-ed4e-198b-dfbb6724a620, 'name': SearchDatastore_Task, 'duration_secs': 0.010568} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.431493] env[63345]: DEBUG oslo_concurrency.lockutils [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 836.431681] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore1] f37b4a95-0725-4a84-b726-fd4f26e87020/f37b4a95-0725-4a84-b726-fd4f26e87020.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 836.431949] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a3199b8b-7ac5-4097-b345-f476d04fd18e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.440044] env[63345]: DEBUG oslo_vmware.api [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Waiting for the task: (returnval){ [ 836.440044] env[63345]: value = "task-1017211" [ 836.440044] env[63345]: _type = "Task" [ 836.440044] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.448098] env[63345]: DEBUG oslo_vmware.api [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017211, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.456836] env[63345]: DEBUG oslo_concurrency.lockutils [req-e617af47-afca-45b6-8b44-1413e8ccb1b7 req-27e90ba1-f00b-4dd3-a03b-befa5aedfe26 service nova] Releasing lock "refresh_cache-f37b4a95-0725-4a84-b726-fd4f26e87020" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 836.510747] env[63345]: DEBUG oslo_vmware.api [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': task-1017209, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.576510] env[63345]: DEBUG oslo_vmware.api [None req-5d1264cb-55a4-4bb7-963b-f6a8e2dc83c5 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Task: {'id': task-1017210, 'name': ReconfigVM_Task, 'duration_secs': 0.166042} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.576850] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-5d1264cb-55a4-4bb7-963b-f6a8e2dc83c5 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: bc9d2e6a-f77a-4a21-90bc-81949cbfce91] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-226058', 'volume_id': 'ae2b09a5-682d-4ef7-9729-fbe017759426', 'name': 'volume-ae2b09a5-682d-4ef7-9729-fbe017759426', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'bc9d2e6a-f77a-4a21-90bc-81949cbfce91', 'attached_at': '', 'detached_at': '', 'volume_id': 'ae2b09a5-682d-4ef7-9729-fbe017759426', 'serial': 'ae2b09a5-682d-4ef7-9729-fbe017759426'} {{(pid=63345) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 836.765672] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d6b74ae1-2c3a-4c57-a9e1-bd0268848c63 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Acquiring lock "df2f06af-54a6-4dbd-83ff-1e4b066acbf3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 836.766042] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d6b74ae1-2c3a-4c57-a9e1-bd0268848c63 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Lock "df2f06af-54a6-4dbd-83ff-1e4b066acbf3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 836.766331] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d6b74ae1-2c3a-4c57-a9e1-bd0268848c63 tempest-ImagesOneServerNegativeTestJSON-1510602040 
tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Acquiring lock "df2f06af-54a6-4dbd-83ff-1e4b066acbf3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 836.766550] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d6b74ae1-2c3a-4c57-a9e1-bd0268848c63 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Lock "df2f06af-54a6-4dbd-83ff-1e4b066acbf3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 836.766730] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d6b74ae1-2c3a-4c57-a9e1-bd0268848c63 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Lock "df2f06af-54a6-4dbd-83ff-1e4b066acbf3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 836.769596] env[63345]: INFO nova.compute.manager [None req-d6b74ae1-2c3a-4c57-a9e1-bd0268848c63 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: df2f06af-54a6-4dbd-83ff-1e4b066acbf3] Terminating instance [ 836.776944] env[63345]: DEBUG oslo_concurrency.lockutils [None req-921d1268-d248-42bd-8630-9e88a573b677 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 836.804636] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquiring lock "0da64b45-fa00-4fe8-8d1d-df586f27743f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 836.804966] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Lock "0da64b45-fa00-4fe8-8d1d-df586f27743f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 836.821925] env[63345]: DEBUG oslo_concurrency.lockutils [None req-50b04232-926d-4677-9129-41a8dd83ce1b tempest-ServerRescueTestJSONUnderV235-2122736330 tempest-ServerRescueTestJSONUnderV235-2122736330-project-member] Lock "00c58889-75f7-4a4b-a5a3-a45723c1f495" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.111s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 836.957242] env[63345]: DEBUG oslo_vmware.api [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 
tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017211, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.961115] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 836.961544] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-20893e3c-8a14-4bf9-88fe-8152405fba5b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.971122] env[63345]: DEBUG oslo_vmware.api [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 836.971122] env[63345]: value = "task-1017212" [ 836.971122] env[63345]: _type = "Task" [ 836.971122] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.981521] env[63345]: DEBUG oslo_vmware.api [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017212, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.007539] env[63345]: DEBUG oslo_vmware.api [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': task-1017209, 'name': ReconfigVM_Task, 'duration_secs': 0.577071} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.007864] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: dde93fd5-6312-4d91-b041-b7fc84b207d3] Reconfigured VM instance instance-00000048 to attach disk [datastore1] dde93fd5-6312-4d91-b041-b7fc84b207d3/dde93fd5-6312-4d91-b041-b7fc84b207d3.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 837.011502] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-927828aa-c11f-4281-83c0-df91cf26e0fd {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.019205] env[63345]: DEBUG oslo_vmware.api [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Waiting for the task: (returnval){ [ 837.019205] env[63345]: value = "task-1017213" [ 837.019205] env[63345]: _type = "Task" [ 837.019205] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.027263] env[63345]: DEBUG oslo_vmware.api [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': task-1017213, 'name': Rename_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.140020] env[63345]: DEBUG nova.objects.instance [None req-5d1264cb-55a4-4bb7-963b-f6a8e2dc83c5 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Lazy-loading 'flavor' on Instance uuid bc9d2e6a-f77a-4a21-90bc-81949cbfce91 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 837.274543] env[63345]: DEBUG nova.compute.manager [None req-d6b74ae1-2c3a-4c57-a9e1-bd0268848c63 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: df2f06af-54a6-4dbd-83ff-1e4b066acbf3] Start destroying the instance on the hypervisor. {{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 837.276589] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-d6b74ae1-2c3a-4c57-a9e1-bd0268848c63 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: df2f06af-54a6-4dbd-83ff-1e4b066acbf3] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 837.276589] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cff0292e-e9ea-4de0-8882-4f899264233e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.287719] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6b74ae1-2c3a-4c57-a9e1-bd0268848c63 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: df2f06af-54a6-4dbd-83ff-1e4b066acbf3] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 837.287926] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ce1d55a6-a303-49a3-bb6d-a0f1c23219d5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.295729] env[63345]: DEBUG oslo_vmware.api [None req-d6b74ae1-2c3a-4c57-a9e1-bd0268848c63 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Waiting for the task: (returnval){ [ 837.295729] env[63345]: value = "task-1017214" [ 837.295729] env[63345]: _type = "Task" [ 837.295729] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.310912] env[63345]: DEBUG oslo_vmware.api [None req-d6b74ae1-2c3a-4c57-a9e1-bd0268848c63 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': task-1017214, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.311551] env[63345]: DEBUG nova.compute.manager [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 837.456021] env[63345]: DEBUG oslo_vmware.api [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017211, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.550154} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.456021] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore1] f37b4a95-0725-4a84-b726-fd4f26e87020/f37b4a95-0725-4a84-b726-fd4f26e87020.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 837.456021] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: f37b4a95-0725-4a84-b726-fd4f26e87020] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 837.456021] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-14eaf143-af8a-4d79-ab57-5cacc974cb9f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.465023] env[63345]: DEBUG oslo_vmware.api [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Waiting for the task: (returnval){ [ 837.465023] env[63345]: value = "task-1017215" [ 837.465023] env[63345]: _type = "Task" [ 837.465023] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.472202] env[63345]: DEBUG oslo_vmware.api [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017215, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.486789] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] VM already powered off {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 837.486789] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 837.486789] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8824db79-6e3c-46ea-915d-d57370cdab6b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.499026] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 837.499026] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b33c4179-c888-4e4a-a95e-a5a223f48188 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.532188] env[63345]: DEBUG oslo_vmware.api [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': task-1017213, 'name': Rename_Task, 'duration_secs': 0.210943} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.537986] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: dde93fd5-6312-4d91-b041-b7fc84b207d3] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 837.538820] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cf1e5e78-a443-4ab0-b147-fd642cec24b0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.548439] env[63345]: DEBUG oslo_vmware.api [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Waiting for the task: (returnval){ [ 837.548439] env[63345]: value = "task-1017217" [ 837.548439] env[63345]: _type = "Task" [ 837.548439] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.561032] env[63345]: DEBUG oslo_vmware.api [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': task-1017217, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.586194] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 837.586459] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 837.586674] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Deleting the datastore file [datastore2] 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 837.586997] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-81fa4a79-49ca-4a70-9cb3-163405fe2bfd {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.597328] env[63345]: DEBUG oslo_vmware.api [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 837.597328] env[63345]: value = "task-1017218" [ 837.597328] env[63345]: _type = "Task" [ 837.597328] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.705309] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ab1313a-f1e1-46c1-8b24-71b74f4fee1c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.712524] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-185bc5f8-211f-4ed3-9305-909b92c50268 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.746025] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74ee558d-7822-4fd3-bb8d-032ef5e1c4e5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.754926] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89b4f96d-37f8-4f12-88e3-f04cb182bd49 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.770854] env[63345]: DEBUG nova.compute.provider_tree [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 837.808403] env[63345]: DEBUG oslo_vmware.api [None req-d6b74ae1-2c3a-4c57-a9e1-bd0268848c63 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': task-1017214, 'name': PowerOffVM_Task, 'duration_secs': 0.317711} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.808682] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6b74ae1-2c3a-4c57-a9e1-bd0268848c63 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: df2f06af-54a6-4dbd-83ff-1e4b066acbf3] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 837.808845] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-d6b74ae1-2c3a-4c57-a9e1-bd0268848c63 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: df2f06af-54a6-4dbd-83ff-1e4b066acbf3] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 837.809170] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-edc5c9ac-95dd-45ec-98a4-ca0a4edb283d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.840244] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 837.895182] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-d6b74ae1-2c3a-4c57-a9e1-bd0268848c63 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: df2f06af-54a6-4dbd-83ff-1e4b066acbf3] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 837.895464] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-d6b74ae1-2c3a-4c57-a9e1-bd0268848c63 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: df2f06af-54a6-4dbd-83ff-1e4b066acbf3] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 837.895657] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6b74ae1-2c3a-4c57-a9e1-bd0268848c63 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Deleting the datastore file [datastore2] df2f06af-54a6-4dbd-83ff-1e4b066acbf3 {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 837.895969] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a0c05a98-51d3-49a8-8acf-234acac59b3e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.903375] env[63345]: DEBUG oslo_vmware.api [None req-d6b74ae1-2c3a-4c57-a9e1-bd0268848c63 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Waiting for the task: (returnval){ [ 837.903375] env[63345]: value = "task-1017220" [ 837.903375] env[63345]: _type = "Task" [ 837.903375] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.912359] env[63345]: DEBUG oslo_vmware.api [None req-d6b74ae1-2c3a-4c57-a9e1-bd0268848c63 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': task-1017220, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.973750] env[63345]: DEBUG oslo_vmware.api [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017215, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.148365} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.974050] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: f37b4a95-0725-4a84-b726-fd4f26e87020] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 837.974880] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c8788e1-bcf5-4f38-8a62-73061adec0c6 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.006260] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: f37b4a95-0725-4a84-b726-fd4f26e87020] Reconfiguring VM instance instance-00000049 to attach disk [datastore1] f37b4a95-0725-4a84-b726-fd4f26e87020/f37b4a95-0725-4a84-b726-fd4f26e87020.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 838.006582] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f6e468c5-385d-483d-9c3f-03b5e4e6227d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.027945] env[63345]: DEBUG oslo_vmware.api [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Waiting for the task: (returnval){ [ 838.027945] env[63345]: value = "task-1017221" [ 838.027945] env[63345]: _type = "Task" [ 838.027945] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.036680] env[63345]: DEBUG oslo_vmware.api [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017221, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.062125] env[63345]: DEBUG oslo_vmware.api [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': task-1017217, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.111067] env[63345]: DEBUG oslo_vmware.api [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017218, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.201714} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.111669] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 838.111778] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 838.112220] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 838.152181] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5d1264cb-55a4-4bb7-963b-f6a8e2dc83c5 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Lock "bc9d2e6a-f77a-4a21-90bc-81949cbfce91" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.274s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 838.274766] env[63345]: DEBUG nova.scheduler.client.report [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 838.416103] env[63345]: DEBUG oslo_vmware.api [None req-d6b74ae1-2c3a-4c57-a9e1-bd0268848c63 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': task-1017220, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.136482} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.416378] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6b74ae1-2c3a-4c57-a9e1-bd0268848c63 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 838.416575] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-d6b74ae1-2c3a-4c57-a9e1-bd0268848c63 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: df2f06af-54a6-4dbd-83ff-1e4b066acbf3] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 838.416751] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-d6b74ae1-2c3a-4c57-a9e1-bd0268848c63 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: df2f06af-54a6-4dbd-83ff-1e4b066acbf3] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 838.416922] env[63345]: INFO nova.compute.manager [None req-d6b74ae1-2c3a-4c57-a9e1-bd0268848c63 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: df2f06af-54a6-4dbd-83ff-1e4b066acbf3] Took 1.14 seconds to destroy the instance on the hypervisor. [ 838.417190] env[63345]: DEBUG oslo.service.loopingcall [None req-d6b74ae1-2c3a-4c57-a9e1-bd0268848c63 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 838.417438] env[63345]: DEBUG nova.compute.manager [-] [instance: df2f06af-54a6-4dbd-83ff-1e4b066acbf3] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 838.417551] env[63345]: DEBUG nova.network.neutron [-] [instance: df2f06af-54a6-4dbd-83ff-1e4b066acbf3] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 838.538855] env[63345]: DEBUG oslo_vmware.api [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017221, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.562675] env[63345]: DEBUG oslo_vmware.api [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': task-1017217, 'name': PowerOnVM_Task, 'duration_secs': 0.732329} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.562675] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: dde93fd5-6312-4d91-b041-b7fc84b207d3] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 838.562859] env[63345]: INFO nova.compute.manager [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: dde93fd5-6312-4d91-b041-b7fc84b207d3] Took 9.12 seconds to spawn the instance on the hypervisor. [ 838.563292] env[63345]: DEBUG nova.compute.manager [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: dde93fd5-6312-4d91-b041-b7fc84b207d3] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 838.564146] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbd94e0d-0fbe-4678-8655-98e64cda01bf {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.780091] env[63345]: DEBUG oslo_concurrency.lockutils [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.490s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 838.780653] env[63345]: DEBUG nova.compute.manager [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] [instance: a3f34e0e-2969-406f-a086-a925549e458e] Start building networks asynchronously for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 838.784524] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a0001168-4e4f-4106-8211-41222142d8e8 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.682s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 838.785164] env[63345]: DEBUG nova.objects.instance [None req-a0001168-4e4f-4106-8211-41222142d8e8 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Lazy-loading 'resources' on Instance uuid f043239f-7158-4199-a784-d711a5a301be {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 839.040384] env[63345]: DEBUG oslo_vmware.api [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017221, 'name': ReconfigVM_Task, 'duration_secs': 0.792702} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.040722] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: f37b4a95-0725-4a84-b726-fd4f26e87020] Reconfigured VM instance instance-00000049 to attach disk [datastore1] f37b4a95-0725-4a84-b726-fd4f26e87020/f37b4a95-0725-4a84-b726-fd4f26e87020.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 839.043727] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c6cd6f9d-2bb3-4e05-85e3-4d76031311d6 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.048970] env[63345]: DEBUG oslo_vmware.api [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Waiting for the task: (returnval){ [ 839.048970] env[63345]: value = "task-1017222" [ 839.048970] env[63345]: _type = "Task" [ 839.048970] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.057806] env[63345]: DEBUG oslo_vmware.api [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017222, 'name': Rename_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.070821] env[63345]: DEBUG nova.compute.manager [req-2b63551f-1a02-4423-8838-fecb4915e689 req-ae4bed50-1873-40f4-9340-51d43768641d service nova] [instance: df2f06af-54a6-4dbd-83ff-1e4b066acbf3] Received event network-vif-deleted-9682a3c3-bc5f-4d38-998f-d008a47b824e {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 839.071094] env[63345]: INFO nova.compute.manager [req-2b63551f-1a02-4423-8838-fecb4915e689 req-ae4bed50-1873-40f4-9340-51d43768641d service nova] [instance: df2f06af-54a6-4dbd-83ff-1e4b066acbf3] Neutron deleted interface 9682a3c3-bc5f-4d38-998f-d008a47b824e; detaching it from the instance and deleting it from the info cache [ 839.071271] env[63345]: DEBUG nova.network.neutron [req-2b63551f-1a02-4423-8838-fecb4915e689 req-ae4bed50-1873-40f4-9340-51d43768641d service nova] [instance: df2f06af-54a6-4dbd-83ff-1e4b066acbf3] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 839.085854] env[63345]: INFO nova.compute.manager [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: dde93fd5-6312-4d91-b041-b7fc84b207d3] Took 38.94 seconds to build instance. 
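The "Task: {...} progress is N%" and "completed successfully" entries above are produced by oslo.vmware polling vCenter task state on a fixed interval (the _poll_task frames in api.py), driven by an oslo.service looping call - the same mechanism behind the "Waiting for function ... to return" lines. Below is a minimal, self-contained sketch of that generic polling pattern using only oslo_service.loopingcall; the fetch_progress callable and the 0.5 s interval are assumptions for illustration, not values taken from this log.

    # Illustrative sketch of the fixed-interval polling pattern behind the
    # "_poll_task ... progress is N%" entries above. fetch_progress is a
    # placeholder callable returning (state, percent) for a task id.
    from oslo_service import loopingcall


    def _poll(task_id, fetch_progress):
        """Check task state once; stop the loop when the task is finished."""
        state, progress = fetch_progress(task_id)
        print('Task %s progress is %d%%' % (task_id, progress))
        if state == 'success':
            # LoopingCallDone stops the loop; retvalue becomes .wait()'s result.
            raise loopingcall.LoopingCallDone(retvalue=task_id)
        if state == 'error':
            raise RuntimeError('task %s failed' % task_id)


    def wait_for(task_id, fetch_progress, interval=0.5):
        # Re-run _poll every `interval` seconds until LoopingCallDone is raised.
        timer = loopingcall.FixedIntervalLoopingCall(_poll, task_id, fetch_progress)
        return timer.start(interval=interval).wait()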
[ 839.151496] env[63345]: DEBUG nova.virt.hardware [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 839.151702] env[63345]: DEBUG nova.virt.hardware [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 839.151945] env[63345]: DEBUG nova.virt.hardware [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 839.152887] env[63345]: DEBUG nova.virt.hardware [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 839.152887] env[63345]: DEBUG nova.virt.hardware [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 839.152887] env[63345]: DEBUG nova.virt.hardware [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 839.153041] env[63345]: DEBUG nova.virt.hardware [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 839.153542] env[63345]: DEBUG nova.virt.hardware [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 839.153542] env[63345]: DEBUG nova.virt.hardware [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 
tempest-ServerActionsTestOtherA-316720793-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 839.153707] env[63345]: DEBUG nova.virt.hardware [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 839.153852] env[63345]: DEBUG nova.virt.hardware [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 839.156736] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f255feb6-b730-4ab8-b532-b87dc6575be2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.166827] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9014bade-56b1-436e-bb52-3e0ecad150f3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.182210] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d9:6b:c4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '68add7d6-c025-46fa-84d3-9c589adb63e4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '07017fee-f295-4317-9453-e41726d715c5', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 839.191146] env[63345]: DEBUG oslo.service.loopingcall [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 839.191146] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 839.191146] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-72688ae3-832b-4915-b36b-95a10049e451 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.215881] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 839.215881] env[63345]: value = "task-1017223" [ 839.215881] env[63345]: _type = "Task" [ 839.215881] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.227513] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017223, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.286278] env[63345]: DEBUG nova.compute.utils [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 839.288601] env[63345]: DEBUG nova.compute.manager [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] [instance: a3f34e0e-2969-406f-a086-a925549e458e] Allocating IP information in the background. {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 839.288873] env[63345]: DEBUG nova.network.neutron [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] [instance: a3f34e0e-2969-406f-a086-a925549e458e] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 839.317993] env[63345]: DEBUG nova.network.neutron [-] [instance: df2f06af-54a6-4dbd-83ff-1e4b066acbf3] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 839.360072] env[63345]: DEBUG oslo_concurrency.lockutils [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Acquiring lock "14198777-9091-4c69-8928-c83135acc7d2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 839.360072] env[63345]: DEBUG oslo_concurrency.lockutils [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Lock "14198777-9091-4c69-8928-c83135acc7d2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 839.373845] env[63345]: DEBUG nova.policy [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fba791838ff14041a93099479d8b5d84', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e53a2ba19a0d4e1e86650549bc1f32c0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 839.564389] env[63345]: DEBUG oslo_vmware.api [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017222, 'name': Rename_Task, 'duration_secs': 0.276737} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.565458] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: f37b4a95-0725-4a84-b726-fd4f26e87020] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 839.565757] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d96be57c-9a2c-441b-b0ed-10daa0cb7b88 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.578778] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8a9245d4-ebe1-4f0c-bf84-df4b1096bb58 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.580600] env[63345]: DEBUG oslo_vmware.api [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Waiting for the task: (returnval){ [ 839.580600] env[63345]: value = "task-1017224" [ 839.580600] env[63345]: _type = "Task" [ 839.580600] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.591866] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75b63b6e-9b26-4b2d-92e8-704e75d7176a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.604115] env[63345]: DEBUG oslo_vmware.api [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017224, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.607409] env[63345]: DEBUG oslo_concurrency.lockutils [None req-debaea8c-190c-4fd1-b305-0edc9cc2f7c2 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Lock "dde93fd5-6312-4d91-b041-b7fc84b207d3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 40.474s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 839.639629] env[63345]: DEBUG nova.compute.manager [req-2b63551f-1a02-4423-8838-fecb4915e689 req-ae4bed50-1873-40f4-9340-51d43768641d service nova] [instance: df2f06af-54a6-4dbd-83ff-1e4b066acbf3] Detach interface failed, port_id=9682a3c3-bc5f-4d38-998f-d008a47b824e, reason: Instance df2f06af-54a6-4dbd-83ff-1e4b066acbf3 could not be found. {{(pid=63345) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 839.726834] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017223, 'name': CreateVM_Task, 'duration_secs': 0.390808} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.727037] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 839.727801] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 839.727973] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 839.728316] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 839.728577] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2a006e24-f762-46d6-ade4-dd87e3a03776 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.734456] env[63345]: DEBUG oslo_vmware.api [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 839.734456] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]521a4470-2eb8-d064-8309-5a1c57de31cd" [ 839.734456] env[63345]: _type = "Task" [ 839.734456] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.744073] env[63345]: DEBUG oslo_vmware.api [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]521a4470-2eb8-d064-8309-5a1c57de31cd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.765923] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98c30e1b-de92-4141-b50c-d08e5e2f2adf {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.774966] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a907cfb-2822-4fe4-ad9d-e5e4d2e05326 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.807772] env[63345]: DEBUG nova.compute.manager [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] [instance: a3f34e0e-2969-406f-a086-a925549e458e] Start building block device mappings for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 839.815188] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54c8bb9e-3939-45ad-8720-20eed36abd3a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.822865] env[63345]: INFO nova.compute.manager [-] [instance: df2f06af-54a6-4dbd-83ff-1e4b066acbf3] Took 1.41 seconds to deallocate network for instance. [ 839.828169] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-278bbfd9-3708-4e0d-8ac2-f4b9c0a75fd1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.844671] env[63345]: DEBUG nova.compute.provider_tree [None req-a0001168-4e4f-4106-8211-41222142d8e8 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 839.862559] env[63345]: DEBUG nova.compute.manager [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: 14198777-9091-4c69-8928-c83135acc7d2] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 839.913011] env[63345]: DEBUG nova.network.neutron [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] [instance: a3f34e0e-2969-406f-a086-a925549e458e] Successfully created port: 1fed049a-d415-4db7-a8c2-d32664f0324b {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 840.093617] env[63345]: DEBUG oslo_vmware.api [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017224, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.127738] env[63345]: INFO nova.compute.manager [None req-1edba048-317d-4421-a27f-de7e032d598a tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: dde93fd5-6312-4d91-b041-b7fc84b207d3] Rescuing [ 840.128068] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1edba048-317d-4421-a27f-de7e032d598a tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Acquiring lock "refresh_cache-dde93fd5-6312-4d91-b041-b7fc84b207d3" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 840.128251] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1edba048-317d-4421-a27f-de7e032d598a tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Acquired lock "refresh_cache-dde93fd5-6312-4d91-b041-b7fc84b207d3" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 840.128441] env[63345]: DEBUG nova.network.neutron [None req-1edba048-317d-4421-a27f-de7e032d598a tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: dde93fd5-6312-4d91-b041-b7fc84b207d3] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 840.256624] env[63345]: DEBUG oslo_vmware.api [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]521a4470-2eb8-d064-8309-5a1c57de31cd, 'name': SearchDatastore_Task, 'duration_secs': 0.009945} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.256624] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 840.256624] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 840.256624] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 840.256624] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 840.256624] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 840.256868] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7ff0d2d5-8bc1-490b-9639-7c2f47459189 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.266102] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 840.266964] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 840.267086] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1f80807b-c3f7-40f6-b7f2-a42ed585a072 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.273474] env[63345]: DEBUG oslo_vmware.api [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 840.273474] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52fdf258-e180-c944-b914-1d54ccd4de85" [ 840.273474] env[63345]: _type = "Task" [ 840.273474] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.281684] env[63345]: DEBUG oslo_vmware.api [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52fdf258-e180-c944-b914-1d54ccd4de85, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.335063] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d6b74ae1-2c3a-4c57-a9e1-bd0268848c63 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 840.347454] env[63345]: DEBUG nova.scheduler.client.report [None req-a0001168-4e4f-4106-8211-41222142d8e8 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 840.390015] env[63345]: DEBUG oslo_concurrency.lockutils [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 840.593945] env[63345]: DEBUG oslo_vmware.api [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017224, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.718128] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b8082a40-6221-4889-b6c8-039a804e534a tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Acquiring lock "4a59b565-571f-48ef-97bd-bed9853e2d8e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 840.718128] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b8082a40-6221-4889-b6c8-039a804e534a tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Lock "4a59b565-571f-48ef-97bd-bed9853e2d8e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 840.718128] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b8082a40-6221-4889-b6c8-039a804e534a tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Acquiring lock "4a59b565-571f-48ef-97bd-bed9853e2d8e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 840.718828] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b8082a40-6221-4889-b6c8-039a804e534a tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Lock "4a59b565-571f-48ef-97bd-bed9853e2d8e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 840.719205] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b8082a40-6221-4889-b6c8-039a804e534a tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Lock "4a59b565-571f-48ef-97bd-bed9853e2d8e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 840.721836] env[63345]: INFO nova.compute.manager [None req-b8082a40-6221-4889-b6c8-039a804e534a tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: 4a59b565-571f-48ef-97bd-bed9853e2d8e] Terminating instance [ 840.784867] env[63345]: DEBUG oslo_vmware.api [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52fdf258-e180-c944-b914-1d54ccd4de85, 'name': SearchDatastore_Task, 'duration_secs': 0.009216} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.785687] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dcc60a09-4d7c-47a4-9729-c46401490922 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.791400] env[63345]: DEBUG oslo_vmware.api [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 840.791400] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]5220ba08-3c42-1b85-7ede-2049c27d8ad0" [ 840.791400] env[63345]: _type = "Task" [ 840.791400] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.801256] env[63345]: DEBUG oslo_vmware.api [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5220ba08-3c42-1b85-7ede-2049c27d8ad0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.827075] env[63345]: DEBUG nova.compute.manager [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] [instance: a3f34e0e-2969-406f-a086-a925549e458e] Start spawning the instance on the hypervisor. {{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 840.847574] env[63345]: DEBUG nova.network.neutron [None req-1edba048-317d-4421-a27f-de7e032d598a tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: dde93fd5-6312-4d91-b041-b7fc84b207d3] Updating instance_info_cache with network_info: [{"id": "56e0ca56-d9a3-439a-b072-ad4f8da026e8", "address": "fa:16:3e:0e:39:69", "network": {"id": "dc725254-60a8-4edc-aab2-604dfb70677d", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1100061234-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "34efcd7d600f49698c6619be002d838f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b00fe87c-d828-442f-bd09-e9018c468557", "external-id": "nsx-vlan-transportzone-7", "segmentation_id": 7, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap56e0ca56-d9", "ovs_interfaceid": "56e0ca56-d9a3-439a-b072-ad4f8da026e8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 840.856664] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a0001168-4e4f-4106-8211-41222142d8e8 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.069s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 840.856664] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.433s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 840.858751] env[63345]: INFO nova.compute.claims [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 1e349d03-6cae-4322-9941-d48c52c21c0e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 840.869899] env[63345]: DEBUG nova.virt.hardware [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 840.870153] env[63345]: DEBUG nova.virt.hardware [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 840.870320] env[63345]: DEBUG nova.virt.hardware [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 840.870503] env[63345]: DEBUG nova.virt.hardware [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 840.870651] env[63345]: DEBUG nova.virt.hardware [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 840.870803] env[63345]: DEBUG nova.virt.hardware [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Chose sockets=0, cores=0, 
threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 840.871014] env[63345]: DEBUG nova.virt.hardware [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 840.871217] env[63345]: DEBUG nova.virt.hardware [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 840.871595] env[63345]: DEBUG nova.virt.hardware [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 840.871595] env[63345]: DEBUG nova.virt.hardware [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 840.871732] env[63345]: DEBUG nova.virt.hardware [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 840.872879] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08580308-d37a-47e6-a002-b67f72b92b08 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.882758] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d92667c-97fa-49a3-b4e5-482ea4fcd74f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.905012] env[63345]: INFO nova.scheduler.client.report [None req-a0001168-4e4f-4106-8211-41222142d8e8 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Deleted allocations for instance f043239f-7158-4199-a784-d711a5a301be [ 841.098097] env[63345]: DEBUG oslo_vmware.api [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017224, 'name': PowerOnVM_Task, 'duration_secs': 1.380469} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.098686] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: f37b4a95-0725-4a84-b726-fd4f26e87020] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 841.099283] env[63345]: INFO nova.compute.manager [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: f37b4a95-0725-4a84-b726-fd4f26e87020] Took 9.03 seconds to spawn the instance on the hypervisor. [ 841.099659] env[63345]: DEBUG nova.compute.manager [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: f37b4a95-0725-4a84-b726-fd4f26e87020] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 841.100610] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6276e5fb-e3e2-4c7a-baa1-eb2853fc2c97 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.226399] env[63345]: DEBUG nova.compute.manager [None req-b8082a40-6221-4889-b6c8-039a804e534a tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: 4a59b565-571f-48ef-97bd-bed9853e2d8e] Start destroying the instance on the hypervisor. {{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 841.226662] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-b8082a40-6221-4889-b6c8-039a804e534a tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: 4a59b565-571f-48ef-97bd-bed9853e2d8e] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 841.227574] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b26d36a9-6d38-4f53-921e-e9eb4adf1974 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.235803] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8082a40-6221-4889-b6c8-039a804e534a tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: 4a59b565-571f-48ef-97bd-bed9853e2d8e] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 841.236225] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e48d6c67-04e9-4041-bee0-dc6383d82f14 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.243248] env[63345]: DEBUG oslo_vmware.api [None req-b8082a40-6221-4889-b6c8-039a804e534a tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Waiting for the task: (returnval){ [ 841.243248] env[63345]: value = "task-1017225" [ 841.243248] env[63345]: _type = "Task" [ 841.243248] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.253245] env[63345]: DEBUG oslo_vmware.api [None req-b8082a40-6221-4889-b6c8-039a804e534a tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Task: {'id': task-1017225, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.307808] env[63345]: DEBUG oslo_vmware.api [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5220ba08-3c42-1b85-7ede-2049c27d8ad0, 'name': SearchDatastore_Task, 'duration_secs': 0.00982} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.308610] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 841.308610] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f/691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 841.311110] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e3b536e2-216e-449c-82c8-3a7dccbfd85a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.320919] env[63345]: DEBUG oslo_vmware.api [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 841.320919] env[63345]: value = "task-1017226" [ 841.320919] env[63345]: _type = "Task" [ 841.320919] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.332700] env[63345]: DEBUG oslo_vmware.api [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017226, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.350745] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1edba048-317d-4421-a27f-de7e032d598a tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Releasing lock "refresh_cache-dde93fd5-6312-4d91-b041-b7fc84b207d3" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 841.416186] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a0001168-4e4f-4106-8211-41222142d8e8 tempest-ServersV294TestFqdnHostnames-438011618 tempest-ServersV294TestFqdnHostnames-438011618-project-member] Lock "f043239f-7158-4199-a784-d711a5a301be" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.296s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 841.622643] env[63345]: INFO nova.compute.manager [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: f37b4a95-0725-4a84-b726-fd4f26e87020] Took 37.94 seconds to build instance. [ 841.760538] env[63345]: DEBUG oslo_vmware.api [None req-b8082a40-6221-4889-b6c8-039a804e534a tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Task: {'id': task-1017225, 'name': PowerOffVM_Task, 'duration_secs': 0.196998} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.761238] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8082a40-6221-4889-b6c8-039a804e534a tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: 4a59b565-571f-48ef-97bd-bed9853e2d8e] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 841.761634] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-b8082a40-6221-4889-b6c8-039a804e534a tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: 4a59b565-571f-48ef-97bd-bed9853e2d8e] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 841.762057] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b0cf8660-cc97-4dc0-8af2-e4f9ceae068d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.833591] env[63345]: DEBUG oslo_vmware.api [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017226, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.853021] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-b8082a40-6221-4889-b6c8-039a804e534a tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: 4a59b565-571f-48ef-97bd-bed9853e2d8e] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 841.853021] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-b8082a40-6221-4889-b6c8-039a804e534a tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: 4a59b565-571f-48ef-97bd-bed9853e2d8e] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 841.853021] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8082a40-6221-4889-b6c8-039a804e534a tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Deleting the datastore file [datastore2] 4a59b565-571f-48ef-97bd-bed9853e2d8e {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 841.853021] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-78da03be-934a-405f-83b6-acd6fdff6d15 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.861160] env[63345]: DEBUG oslo_vmware.api [None req-b8082a40-6221-4889-b6c8-039a804e534a tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Waiting for the task: (returnval){ [ 841.861160] env[63345]: value = "task-1017228" [ 841.861160] env[63345]: _type = "Task" [ 841.861160] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.872977] env[63345]: DEBUG oslo_vmware.api [None req-b8082a40-6221-4889-b6c8-039a804e534a tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Task: {'id': task-1017228, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.125987] env[63345]: DEBUG oslo_concurrency.lockutils [None req-37261967-0aa1-4263-87a8-3d701a0a06c4 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Lock "f37b4a95-0725-4a84-b726-fd4f26e87020" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.453s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 842.157809] env[63345]: DEBUG nova.compute.manager [req-6dafd8cc-ec18-4af7-8d35-c4cd6feb202e req-dd476de6-456a-4996-962d-b379953b3ac6 service nova] [instance: a3f34e0e-2969-406f-a086-a925549e458e] Received event network-vif-plugged-1fed049a-d415-4db7-a8c2-d32664f0324b {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 842.158216] env[63345]: DEBUG oslo_concurrency.lockutils [req-6dafd8cc-ec18-4af7-8d35-c4cd6feb202e req-dd476de6-456a-4996-962d-b379953b3ac6 service nova] Acquiring lock "a3f34e0e-2969-406f-a086-a925549e458e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 842.159288] env[63345]: DEBUG oslo_concurrency.lockutils [req-6dafd8cc-ec18-4af7-8d35-c4cd6feb202e req-dd476de6-456a-4996-962d-b379953b3ac6 service nova] Lock "a3f34e0e-2969-406f-a086-a925549e458e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 842.159288] env[63345]: DEBUG oslo_concurrency.lockutils [req-6dafd8cc-ec18-4af7-8d35-c4cd6feb202e req-dd476de6-456a-4996-962d-b379953b3ac6 service nova] Lock "a3f34e0e-2969-406f-a086-a925549e458e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 842.159288] env[63345]: DEBUG nova.compute.manager [req-6dafd8cc-ec18-4af7-8d35-c4cd6feb202e req-dd476de6-456a-4996-962d-b379953b3ac6 service nova] [instance: a3f34e0e-2969-406f-a086-a925549e458e] No waiting events found dispatching network-vif-plugged-1fed049a-d415-4db7-a8c2-d32664f0324b {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 842.159288] env[63345]: WARNING nova.compute.manager [req-6dafd8cc-ec18-4af7-8d35-c4cd6feb202e req-dd476de6-456a-4996-962d-b379953b3ac6 service nova] [instance: a3f34e0e-2969-406f-a086-a925549e458e] Received unexpected event network-vif-plugged-1fed049a-d415-4db7-a8c2-d32664f0324b for instance with vm_state building and task_state spawning. 
[ 842.187923] env[63345]: DEBUG nova.network.neutron [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] [instance: a3f34e0e-2969-406f-a086-a925549e458e] Successfully updated port: 1fed049a-d415-4db7-a8c2-d32664f0324b {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 842.290292] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5ce5752-3cdb-47e6-b3e2-c684685c2f0d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.298577] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d156a127-d65a-4ee6-aadc-92d9f2f45ece {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.335936] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fadc416-ce73-4564-81e2-d198f8fac6f5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.347630] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b453782-8390-4e06-aa55-2d093a76391f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.351515] env[63345]: DEBUG oslo_vmware.api [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017226, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.542537} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.351824] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f/691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 842.351987] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 842.352576] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1d2e5017-c64f-4916-9ee2-6e2d87971a20 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.362410] env[63345]: DEBUG nova.compute.provider_tree [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 842.369473] env[63345]: DEBUG oslo_vmware.api [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 842.369473] env[63345]: value = "task-1017229" [ 842.369473] env[63345]: _type = "Task" [ 842.369473] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.372608] env[63345]: DEBUG oslo_vmware.api [None req-b8082a40-6221-4889-b6c8-039a804e534a tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Task: {'id': task-1017228, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.150457} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.376232] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8082a40-6221-4889-b6c8-039a804e534a tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 842.376437] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-b8082a40-6221-4889-b6c8-039a804e534a tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: 4a59b565-571f-48ef-97bd-bed9853e2d8e] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 842.376626] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-b8082a40-6221-4889-b6c8-039a804e534a tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: 4a59b565-571f-48ef-97bd-bed9853e2d8e] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 842.376820] env[63345]: INFO nova.compute.manager [None req-b8082a40-6221-4889-b6c8-039a804e534a tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] [instance: 4a59b565-571f-48ef-97bd-bed9853e2d8e] Took 1.15 seconds to destroy the instance on the hypervisor. [ 842.377058] env[63345]: DEBUG oslo.service.loopingcall [None req-b8082a40-6221-4889-b6c8-039a804e534a tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 842.377561] env[63345]: DEBUG nova.compute.manager [-] [instance: 4a59b565-571f-48ef-97bd-bed9853e2d8e] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 842.377658] env[63345]: DEBUG nova.network.neutron [-] [instance: 4a59b565-571f-48ef-97bd-bed9853e2d8e] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 842.386937] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-1edba048-317d-4421-a27f-de7e032d598a tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: dde93fd5-6312-4d91-b041-b7fc84b207d3] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 842.387335] env[63345]: DEBUG oslo_vmware.api [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017229, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.387566] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-68a9b01f-deaf-47b2-9b2f-b6d51bfa720d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.394755] env[63345]: DEBUG oslo_vmware.api [None req-1edba048-317d-4421-a27f-de7e032d598a tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Waiting for the task: (returnval){ [ 842.394755] env[63345]: value = "task-1017230" [ 842.394755] env[63345]: _type = "Task" [ 842.394755] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.406798] env[63345]: DEBUG oslo_vmware.api [None req-1edba048-317d-4421-a27f-de7e032d598a tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': task-1017230, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.663509] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dfd9c36c-e449-49a3-9575-a84cd803ef8e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquiring lock "f37b4a95-0725-4a84-b726-fd4f26e87020" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 842.663824] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dfd9c36c-e449-49a3-9575-a84cd803ef8e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Lock "f37b4a95-0725-4a84-b726-fd4f26e87020" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 842.664062] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dfd9c36c-e449-49a3-9575-a84cd803ef8e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquiring lock "f37b4a95-0725-4a84-b726-fd4f26e87020-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 842.664267] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dfd9c36c-e449-49a3-9575-a84cd803ef8e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Lock "f37b4a95-0725-4a84-b726-fd4f26e87020-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 842.664447] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dfd9c36c-e449-49a3-9575-a84cd803ef8e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Lock "f37b4a95-0725-4a84-b726-fd4f26e87020-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 842.666671] env[63345]: INFO nova.compute.manager [None req-dfd9c36c-e449-49a3-9575-a84cd803ef8e tempest-ServersTestJSON-216022561 
tempest-ServersTestJSON-216022561-project-member] [instance: f37b4a95-0725-4a84-b726-fd4f26e87020] Terminating instance [ 842.690364] env[63345]: DEBUG oslo_concurrency.lockutils [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Acquiring lock "refresh_cache-a3f34e0e-2969-406f-a086-a925549e458e" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 842.690571] env[63345]: DEBUG oslo_concurrency.lockutils [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Acquired lock "refresh_cache-a3f34e0e-2969-406f-a086-a925549e458e" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 842.690668] env[63345]: DEBUG nova.network.neutron [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] [instance: a3f34e0e-2969-406f-a086-a925549e458e] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 842.867244] env[63345]: DEBUG nova.scheduler.client.report [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 842.886121] env[63345]: DEBUG oslo_vmware.api [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017229, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.32218} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.886121] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 842.887201] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c6ec832-516e-4dab-9b1d-b96a6e8220ec {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.915487] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Reconfiguring VM instance instance-00000046 to attach disk [datastore2] 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f/691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 842.920115] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-27ffbcea-c417-4782-95d6-8c1e62c81b83 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.944759] env[63345]: DEBUG oslo_vmware.api [None req-1edba048-317d-4421-a27f-de7e032d598a tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': task-1017230, 'name': PowerOffVM_Task} progress is 100%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.947242] env[63345]: DEBUG oslo_vmware.api [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 842.947242] env[63345]: value = "task-1017231" [ 842.947242] env[63345]: _type = "Task" [ 842.947242] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.959714] env[63345]: DEBUG oslo_vmware.api [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017231, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.171322] env[63345]: DEBUG nova.compute.manager [None req-dfd9c36c-e449-49a3-9575-a84cd803ef8e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: f37b4a95-0725-4a84-b726-fd4f26e87020] Start destroying the instance on the hypervisor. 
{{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 843.171698] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-dfd9c36c-e449-49a3-9575-a84cd803ef8e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: f37b4a95-0725-4a84-b726-fd4f26e87020] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 843.172724] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44350af7-ee60-45b3-a46e-fca203ab2988 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.181602] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfd9c36c-e449-49a3-9575-a84cd803ef8e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: f37b4a95-0725-4a84-b726-fd4f26e87020] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 843.181885] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-60121464-dd90-4ebe-b85a-a9c96a2beb2c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.189958] env[63345]: DEBUG oslo_vmware.api [None req-dfd9c36c-e449-49a3-9575-a84cd803ef8e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Waiting for the task: (returnval){ [ 843.189958] env[63345]: value = "task-1017232" [ 843.189958] env[63345]: _type = "Task" [ 843.189958] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.202254] env[63345]: DEBUG oslo_vmware.api [None req-dfd9c36c-e449-49a3-9575-a84cd803ef8e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017232, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.262299] env[63345]: DEBUG nova.network.neutron [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] [instance: a3f34e0e-2969-406f-a086-a925549e458e] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 843.377331] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.521s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 843.377909] env[63345]: DEBUG nova.compute.manager [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 1e349d03-6cae-4322-9941-d48c52c21c0e] Start building networks asynchronously for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 843.380663] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5077c400-6ef8-4b26-912d-964466ba2c45 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 20.923s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 843.412925] env[63345]: DEBUG oslo_vmware.api [None req-1edba048-317d-4421-a27f-de7e032d598a tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': task-1017230, 'name': PowerOffVM_Task, 'duration_secs': 0.5495} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.416777] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-1edba048-317d-4421-a27f-de7e032d598a tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: dde93fd5-6312-4d91-b041-b7fc84b207d3] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 843.419124] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7696553-c6f4-46e4-8d77-b17d7a29d570 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.441102] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d6c8de0-760a-4912-8d7b-4bf09eee9179 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.468034] env[63345]: DEBUG oslo_vmware.api [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017231, 'name': ReconfigVM_Task, 'duration_secs': 0.288258} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.468034] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Reconfigured VM instance instance-00000046 to attach disk [datastore2] 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f/691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 843.468034] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a64e66b5-71b1-417c-a8f6-9b98c4f7dab7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.474530] env[63345]: DEBUG oslo_vmware.api [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 843.474530] env[63345]: value = "task-1017233" [ 843.474530] env[63345]: _type = "Task" [ 843.474530] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.484874] env[63345]: DEBUG oslo_vmware.api [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017233, 'name': Rename_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.486731] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-1edba048-317d-4421-a27f-de7e032d598a tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: dde93fd5-6312-4d91-b041-b7fc84b207d3] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 843.486988] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ca2505fd-fc58-43db-b3eb-040d0852aa0c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.494264] env[63345]: DEBUG oslo_vmware.api [None req-1edba048-317d-4421-a27f-de7e032d598a tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Waiting for the task: (returnval){ [ 843.494264] env[63345]: value = "task-1017234" [ 843.494264] env[63345]: _type = "Task" [ 843.494264] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.497393] env[63345]: DEBUG nova.network.neutron [-] [instance: 4a59b565-571f-48ef-97bd-bed9853e2d8e] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 843.510863] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-1edba048-317d-4421-a27f-de7e032d598a tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: dde93fd5-6312-4d91-b041-b7fc84b207d3] VM already powered off {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 843.510949] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-1edba048-317d-4421-a27f-de7e032d598a tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: dde93fd5-6312-4d91-b041-b7fc84b207d3] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 843.511230] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1edba048-317d-4421-a27f-de7e032d598a tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 843.511480] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1edba048-317d-4421-a27f-de7e032d598a tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 843.511725] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-1edba048-317d-4421-a27f-de7e032d598a 
tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 843.512323] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5a25873d-4e2d-4c40-af90-baa9e2994cb3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.522606] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-1edba048-317d-4421-a27f-de7e032d598a tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 843.522878] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-1edba048-317d-4421-a27f-de7e032d598a tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 843.524139] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a6197a0-0d58-4e5c-bb24-f8d3c6bfc1ce {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.530455] env[63345]: DEBUG oslo_vmware.api [None req-1edba048-317d-4421-a27f-de7e032d598a tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Waiting for the task: (returnval){ [ 843.530455] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52afbc2d-7e0d-77dd-d427-dd955d4c2232" [ 843.530455] env[63345]: _type = "Task" [ 843.530455] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.538808] env[63345]: DEBUG oslo_vmware.api [None req-1edba048-317d-4421-a27f-de7e032d598a tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52afbc2d-7e0d-77dd-d427-dd955d4c2232, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.562250] env[63345]: DEBUG nova.network.neutron [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] [instance: a3f34e0e-2969-406f-a086-a925549e458e] Updating instance_info_cache with network_info: [{"id": "1fed049a-d415-4db7-a8c2-d32664f0324b", "address": "fa:16:3e:d7:05:68", "network": {"id": "68417cd9-f7c6-4829-9b9a-142934679bcb", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-2049413518-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e53a2ba19a0d4e1e86650549bc1f32c0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d646f9d5-d2ad-4c22-bea5-85a965334de6", "external-id": "nsx-vlan-transportzone-606", "segmentation_id": 606, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1fed049a-d4", "ovs_interfaceid": "1fed049a-d415-4db7-a8c2-d32664f0324b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 843.703370] env[63345]: DEBUG oslo_vmware.api [None req-dfd9c36c-e449-49a3-9575-a84cd803ef8e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017232, 'name': PowerOffVM_Task, 'duration_secs': 0.18539} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.703703] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfd9c36c-e449-49a3-9575-a84cd803ef8e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: f37b4a95-0725-4a84-b726-fd4f26e87020] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 843.703881] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-dfd9c36c-e449-49a3-9575-a84cd803ef8e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: f37b4a95-0725-4a84-b726-fd4f26e87020] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 843.704155] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ed0e99de-364c-437a-9758-d83e03b34174 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.789095] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-dfd9c36c-e449-49a3-9575-a84cd803ef8e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: f37b4a95-0725-4a84-b726-fd4f26e87020] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 843.789589] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-dfd9c36c-e449-49a3-9575-a84cd803ef8e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: f37b4a95-0725-4a84-b726-fd4f26e87020] Deleting contents of the VM from datastore datastore1 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 843.790031] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-dfd9c36c-e449-49a3-9575-a84cd803ef8e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Deleting the datastore file [datastore1] f37b4a95-0725-4a84-b726-fd4f26e87020 {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 843.790404] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-17236064-d766-4bba-a304-cb835b20c194 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.801967] env[63345]: DEBUG oslo_vmware.api [None req-dfd9c36c-e449-49a3-9575-a84cd803ef8e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Waiting for the task: (returnval){ [ 843.801967] env[63345]: value = "task-1017236" [ 843.801967] env[63345]: _type = "Task" [ 843.801967] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.810910] env[63345]: DEBUG oslo_vmware.api [None req-dfd9c36c-e449-49a3-9575-a84cd803ef8e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017236, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.889048] env[63345]: INFO nova.compute.claims [None req-5077c400-6ef8-4b26-912d-964466ba2c45 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 11652422-9136-4453-b932-06695f9bc910] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 843.897210] env[63345]: DEBUG nova.compute.utils [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 843.899043] env[63345]: DEBUG nova.compute.manager [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 1e349d03-6cae-4322-9941-d48c52c21c0e] Allocating IP information in the background. {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 843.899161] env[63345]: DEBUG nova.network.neutron [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 1e349d03-6cae-4322-9941-d48c52c21c0e] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 843.986793] env[63345]: DEBUG oslo_vmware.api [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017233, 'name': Rename_Task, 'duration_secs': 0.159502} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.989719] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 843.990018] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3e311b61-9197-43ee-b8ff-1f37658f9675 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.996773] env[63345]: DEBUG nova.policy [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fb6730bb6292421e8f943bce2e912bef', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c535ae9067ab4e8a87e95c68af4624fb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 844.001032] env[63345]: INFO nova.compute.manager [-] [instance: 4a59b565-571f-48ef-97bd-bed9853e2d8e] Took 1.62 seconds to deallocate network for instance. 
[ 844.001032] env[63345]: DEBUG oslo_vmware.api [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 844.001032] env[63345]: value = "task-1017237" [ 844.001032] env[63345]: _type = "Task" [ 844.001032] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.015856] env[63345]: DEBUG oslo_vmware.api [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017237, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.043535] env[63345]: DEBUG oslo_vmware.api [None req-1edba048-317d-4421-a27f-de7e032d598a tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52afbc2d-7e0d-77dd-d427-dd955d4c2232, 'name': SearchDatastore_Task, 'duration_secs': 0.009176} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.046060] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5c0809ea-aecb-4cb9-8bed-33ed80be7475 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.053661] env[63345]: DEBUG oslo_vmware.api [None req-1edba048-317d-4421-a27f-de7e032d598a tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Waiting for the task: (returnval){ [ 844.053661] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]524767b7-68f7-b221-1761-935c76125639" [ 844.053661] env[63345]: _type = "Task" [ 844.053661] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.063183] env[63345]: DEBUG oslo_vmware.api [None req-1edba048-317d-4421-a27f-de7e032d598a tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]524767b7-68f7-b221-1761-935c76125639, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.065771] env[63345]: DEBUG oslo_concurrency.lockutils [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Releasing lock "refresh_cache-a3f34e0e-2969-406f-a086-a925549e458e" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 844.066102] env[63345]: DEBUG nova.compute.manager [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] [instance: a3f34e0e-2969-406f-a086-a925549e458e] Instance network_info: |[{"id": "1fed049a-d415-4db7-a8c2-d32664f0324b", "address": "fa:16:3e:d7:05:68", "network": {"id": "68417cd9-f7c6-4829-9b9a-142934679bcb", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-2049413518-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e53a2ba19a0d4e1e86650549bc1f32c0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d646f9d5-d2ad-4c22-bea5-85a965334de6", "external-id": "nsx-vlan-transportzone-606", "segmentation_id": 606, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1fed049a-d4", "ovs_interfaceid": "1fed049a-d415-4db7-a8c2-d32664f0324b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 844.066507] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] [instance: a3f34e0e-2969-406f-a086-a925549e458e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d7:05:68', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd646f9d5-d2ad-4c22-bea5-85a965334de6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1fed049a-d415-4db7-a8c2-d32664f0324b', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 844.074853] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Creating folder: Project (e53a2ba19a0d4e1e86650549bc1f32c0). Parent ref: group-v225918. 
{{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 844.075161] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-20ebdf56-82c0-4f3c-b41c-b02de7d3d242 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.090044] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Created folder: Project (e53a2ba19a0d4e1e86650549bc1f32c0) in parent group-v225918. [ 844.090272] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Creating folder: Instances. Parent ref: group-v226066. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 844.090734] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0d80c6ba-70e1-4bdd-997b-86ba10ba0969 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.101822] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Created folder: Instances in parent group-v226066. [ 844.102112] env[63345]: DEBUG oslo.service.loopingcall [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 844.102336] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a3f34e0e-2969-406f-a086-a925549e458e] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 844.102554] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0338a256-319c-4bf7-a67a-deb789b90ccc {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.126096] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 844.126096] env[63345]: value = "task-1017240" [ 844.126096] env[63345]: _type = "Task" [ 844.126096] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.136974] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017240, 'name': CreateVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.313083] env[63345]: DEBUG oslo_vmware.api [None req-dfd9c36c-e449-49a3-9575-a84cd803ef8e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017236, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.151941} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.313382] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-dfd9c36c-e449-49a3-9575-a84cd803ef8e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 844.313578] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-dfd9c36c-e449-49a3-9575-a84cd803ef8e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: f37b4a95-0725-4a84-b726-fd4f26e87020] Deleted contents of the VM from datastore datastore1 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 844.313767] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-dfd9c36c-e449-49a3-9575-a84cd803ef8e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: f37b4a95-0725-4a84-b726-fd4f26e87020] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 844.313946] env[63345]: INFO nova.compute.manager [None req-dfd9c36c-e449-49a3-9575-a84cd803ef8e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: f37b4a95-0725-4a84-b726-fd4f26e87020] Took 1.14 seconds to destroy the instance on the hypervisor. [ 844.314213] env[63345]: DEBUG oslo.service.loopingcall [None req-dfd9c36c-e449-49a3-9575-a84cd803ef8e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 844.314453] env[63345]: DEBUG nova.compute.manager [-] [instance: f37b4a95-0725-4a84-b726-fd4f26e87020] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 844.314617] env[63345]: DEBUG nova.network.neutron [-] [instance: f37b4a95-0725-4a84-b726-fd4f26e87020] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 844.397673] env[63345]: INFO nova.compute.resource_tracker [None req-5077c400-6ef8-4b26-912d-964466ba2c45 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 11652422-9136-4453-b932-06695f9bc910] Updating resource usage from migration 606711cc-3f86-47b3-9a65-4efb46ca0c65 [ 844.406538] env[63345]: DEBUG nova.compute.manager [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 1e349d03-6cae-4322-9941-d48c52c21c0e] Start building block device mappings for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 844.507945] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b8082a40-6221-4889-b6c8-039a804e534a tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 844.520068] env[63345]: DEBUG oslo_vmware.api [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017237, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.573288] env[63345]: DEBUG oslo_vmware.api [None req-1edba048-317d-4421-a27f-de7e032d598a tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]524767b7-68f7-b221-1761-935c76125639, 'name': SearchDatastore_Task, 'duration_secs': 0.010005} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.577727] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1edba048-317d-4421-a27f-de7e032d598a tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 844.577990] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-1edba048-317d-4421-a27f-de7e032d598a tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore1] dde93fd5-6312-4d91-b041-b7fc84b207d3/2ff49e1b-8f44-4332-bba9-777d55ff62c4-rescue.vmdk. {{(pid=63345) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 844.579160] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ff00d09f-5649-414e-8441-54305baebc64 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.587499] env[63345]: DEBUG oslo_vmware.api [None req-1edba048-317d-4421-a27f-de7e032d598a tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Waiting for the task: (returnval){ [ 844.587499] env[63345]: value = "task-1017241" [ 844.587499] env[63345]: _type = "Task" [ 844.587499] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.604108] env[63345]: DEBUG oslo_vmware.api [None req-1edba048-317d-4421-a27f-de7e032d598a tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': task-1017241, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.638589] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017240, 'name': CreateVM_Task, 'duration_secs': 0.412379} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.641996] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a3f34e0e-2969-406f-a086-a925549e458e] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 844.643661] env[63345]: DEBUG oslo_concurrency.lockutils [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 844.643661] env[63345]: DEBUG oslo_concurrency.lockutils [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 844.644236] env[63345]: DEBUG oslo_concurrency.lockutils [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 844.644540] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d58aaf6b-f658-4051-9504-010d7f6cda40 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.650173] env[63345]: DEBUG oslo_vmware.api [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Waiting for the task: (returnval){ [ 844.650173] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52b00fb1-932c-c16b-5521-a5499a1e371f" [ 844.650173] env[63345]: _type = "Task" [ 844.650173] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.664365] env[63345]: DEBUG oslo_vmware.api [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52b00fb1-932c-c16b-5521-a5499a1e371f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.750029] env[63345]: DEBUG nova.compute.manager [req-d6d2f065-29af-409e-83bc-479c77101053 req-fd0ffb0a-5295-4268-89fd-d1d129d21fc0 service nova] [instance: a3f34e0e-2969-406f-a086-a925549e458e] Received event network-changed-1fed049a-d415-4db7-a8c2-d32664f0324b {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 844.750343] env[63345]: DEBUG nova.compute.manager [req-d6d2f065-29af-409e-83bc-479c77101053 req-fd0ffb0a-5295-4268-89fd-d1d129d21fc0 service nova] [instance: a3f34e0e-2969-406f-a086-a925549e458e] Refreshing instance network info cache due to event network-changed-1fed049a-d415-4db7-a8c2-d32664f0324b. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 844.750594] env[63345]: DEBUG oslo_concurrency.lockutils [req-d6d2f065-29af-409e-83bc-479c77101053 req-fd0ffb0a-5295-4268-89fd-d1d129d21fc0 service nova] Acquiring lock "refresh_cache-a3f34e0e-2969-406f-a086-a925549e458e" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 844.750748] env[63345]: DEBUG oslo_concurrency.lockutils [req-d6d2f065-29af-409e-83bc-479c77101053 req-fd0ffb0a-5295-4268-89fd-d1d129d21fc0 service nova] Acquired lock "refresh_cache-a3f34e0e-2969-406f-a086-a925549e458e" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 844.750915] env[63345]: DEBUG nova.network.neutron [req-d6d2f065-29af-409e-83bc-479c77101053 req-fd0ffb0a-5295-4268-89fd-d1d129d21fc0 service nova] [instance: a3f34e0e-2969-406f-a086-a925549e458e] Refreshing network info cache for port 1fed049a-d415-4db7-a8c2-d32664f0324b {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 844.851824] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65a0d70b-cf63-48d0-a0e3-1ffe1d336a6f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.861607] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e3e3eed-78ca-42bc-b443-5c0b32d6ea92 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.909155] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77eefb4f-c142-429a-aa18-9ac35489e485 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.922569] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f35f501d-e21f-4d22-bbd9-c464f4a75df8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.939719] env[63345]: DEBUG nova.compute.provider_tree [None req-5077c400-6ef8-4b26-912d-964466ba2c45 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 845.018577] env[63345]: DEBUG oslo_vmware.api [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017237, 'name': PowerOnVM_Task, 'duration_secs': 0.531901} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.018894] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 845.019150] env[63345]: DEBUG nova.compute.manager [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 845.020086] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1477b278-8402-473c-8c60-fb51d9403ce0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.075081] env[63345]: DEBUG nova.network.neutron [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 1e349d03-6cae-4322-9941-d48c52c21c0e] Successfully created port: 1ebdc50b-86da-4bb4-8884-530d087bf7dd {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 845.100829] env[63345]: DEBUG oslo_vmware.api [None req-1edba048-317d-4421-a27f-de7e032d598a tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': task-1017241, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.103277] env[63345]: DEBUG nova.compute.manager [req-ea1d67f5-45ee-4b97-9130-9f770d67c6e5 req-9f672219-d733-46fe-9f54-e09b34714e4b service nova] [instance: f37b4a95-0725-4a84-b726-fd4f26e87020] Received event network-vif-deleted-a8479a06-71fa-42d6-a093-13fcbbae3778 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 845.103450] env[63345]: INFO nova.compute.manager [req-ea1d67f5-45ee-4b97-9130-9f770d67c6e5 req-9f672219-d733-46fe-9f54-e09b34714e4b service nova] [instance: f37b4a95-0725-4a84-b726-fd4f26e87020] Neutron deleted interface a8479a06-71fa-42d6-a093-13fcbbae3778; detaching it from the instance and deleting it from the info cache [ 845.104149] env[63345]: DEBUG nova.network.neutron [req-ea1d67f5-45ee-4b97-9130-9f770d67c6e5 req-9f672219-d733-46fe-9f54-e09b34714e4b service nova] [instance: f37b4a95-0725-4a84-b726-fd4f26e87020] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 845.123744] env[63345]: DEBUG nova.network.neutron [-] [instance: f37b4a95-0725-4a84-b726-fd4f26e87020] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 845.166503] env[63345]: DEBUG oslo_vmware.api [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52b00fb1-932c-c16b-5521-a5499a1e371f, 'name': SearchDatastore_Task, 'duration_secs': 0.013098} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.166888] env[63345]: DEBUG oslo_concurrency.lockutils [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 845.167167] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] [instance: a3f34e0e-2969-406f-a086-a925549e458e] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 845.167533] env[63345]: DEBUG oslo_concurrency.lockutils [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 845.167619] env[63345]: DEBUG oslo_concurrency.lockutils [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 845.167810] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 845.168068] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8a616f54-361a-4c88-bd57-54b3e893e4bb {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.180304] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 845.180543] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 845.181359] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1f97df15-0bdb-4aeb-a822-c41a8dac1335 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.195728] env[63345]: DEBUG oslo_vmware.api [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Waiting for the task: (returnval){ [ 845.195728] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]5205e395-4b92-4949-41fc-8eaf8b060b9e" [ 845.195728] env[63345]: _type = "Task" [ 845.195728] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.208695] env[63345]: DEBUG oslo_vmware.api [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5205e395-4b92-4949-41fc-8eaf8b060b9e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.418555] env[63345]: DEBUG nova.compute.manager [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 1e349d03-6cae-4322-9941-d48c52c21c0e] Start spawning the instance on the hypervisor. {{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 845.442562] env[63345]: DEBUG nova.scheduler.client.report [None req-5077c400-6ef8-4b26-912d-964466ba2c45 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 845.452807] env[63345]: DEBUG nova.virt.hardware [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:563}} [ 845.452880] env[63345]: DEBUG nova.virt.hardware [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 845.454492] env[63345]: DEBUG nova.virt.hardware [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 845.454492] env[63345]: DEBUG nova.virt.hardware [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 845.454492] env[63345]: DEBUG nova.virt.hardware [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 845.454492] env[63345]: DEBUG nova.virt.hardware [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 845.454492] env[63345]: DEBUG nova.virt.hardware [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 845.454492] env[63345]: DEBUG nova.virt.hardware [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 845.454492] env[63345]: DEBUG nova.virt.hardware [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 845.454492] env[63345]: DEBUG nova.virt.hardware [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 845.454492] env[63345]: DEBUG nova.virt.hardware [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 845.455389] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e839541-0b20-498b-9d83-21c17dd4d62e {{(pid=63345) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.465020] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7a6e4d5-f64c-4f3e-be89-7bc7b716d096 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.471767] env[63345]: DEBUG nova.network.neutron [req-d6d2f065-29af-409e-83bc-479c77101053 req-fd0ffb0a-5295-4268-89fd-d1d129d21fc0 service nova] [instance: a3f34e0e-2969-406f-a086-a925549e458e] Updated VIF entry in instance network info cache for port 1fed049a-d415-4db7-a8c2-d32664f0324b. {{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 845.472102] env[63345]: DEBUG nova.network.neutron [req-d6d2f065-29af-409e-83bc-479c77101053 req-fd0ffb0a-5295-4268-89fd-d1d129d21fc0 service nova] [instance: a3f34e0e-2969-406f-a086-a925549e458e] Updating instance_info_cache with network_info: [{"id": "1fed049a-d415-4db7-a8c2-d32664f0324b", "address": "fa:16:3e:d7:05:68", "network": {"id": "68417cd9-f7c6-4829-9b9a-142934679bcb", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-2049413518-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e53a2ba19a0d4e1e86650549bc1f32c0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d646f9d5-d2ad-4c22-bea5-85a965334de6", "external-id": "nsx-vlan-transportzone-606", "segmentation_id": 606, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1fed049a-d4", "ovs_interfaceid": "1fed049a-d415-4db7-a8c2-d32664f0324b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 845.533306] env[63345]: INFO nova.compute.manager [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] bringing vm to original state: 'stopped' [ 845.599932] env[63345]: DEBUG oslo_vmware.api [None req-1edba048-317d-4421-a27f-de7e032d598a tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': task-1017241, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.689669} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.600269] env[63345]: INFO nova.virt.vmwareapi.ds_util [None req-1edba048-317d-4421-a27f-de7e032d598a tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore1] dde93fd5-6312-4d91-b041-b7fc84b207d3/2ff49e1b-8f44-4332-bba9-777d55ff62c4-rescue.vmdk. 
[ 845.601144] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32eb79de-f2d9-48df-ae4b-a6a8ce1c387e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.606845] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2eae1521-2976-4267-8574-76a820dd3225 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.632250] env[63345]: INFO nova.compute.manager [-] [instance: f37b4a95-0725-4a84-b726-fd4f26e87020] Took 1.32 seconds to deallocate network for instance. [ 845.640141] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-1edba048-317d-4421-a27f-de7e032d598a tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: dde93fd5-6312-4d91-b041-b7fc84b207d3] Reconfiguring VM instance instance-00000048 to attach disk [datastore1] dde93fd5-6312-4d91-b041-b7fc84b207d3/2ff49e1b-8f44-4332-bba9-777d55ff62c4-rescue.vmdk or device None with type thin {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 845.642212] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-affd7e89-b336-454c-9b40-b0af19e695d1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.659728] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28cb30ac-718c-4852-9d94-07dc09bf3769 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.681287] env[63345]: DEBUG oslo_vmware.api [None req-1edba048-317d-4421-a27f-de7e032d598a tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Waiting for the task: (returnval){ [ 845.681287] env[63345]: value = "task-1017242" [ 845.681287] env[63345]: _type = "Task" [ 845.681287] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.690547] env[63345]: DEBUG oslo_vmware.api [None req-1edba048-317d-4421-a27f-de7e032d598a tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': task-1017242, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.701892] env[63345]: DEBUG nova.compute.manager [req-ea1d67f5-45ee-4b97-9130-9f770d67c6e5 req-9f672219-d733-46fe-9f54-e09b34714e4b service nova] [instance: f37b4a95-0725-4a84-b726-fd4f26e87020] Detach interface failed, port_id=a8479a06-71fa-42d6-a093-13fcbbae3778, reason: Instance f37b4a95-0725-4a84-b726-fd4f26e87020 could not be found. {{(pid=63345) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 845.714763] env[63345]: DEBUG oslo_vmware.api [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5205e395-4b92-4949-41fc-8eaf8b060b9e, 'name': SearchDatastore_Task, 'duration_secs': 0.025772} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.715679] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0aecff21-71c5-435b-b5b5-e375ca794514 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.721614] env[63345]: DEBUG oslo_vmware.api [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Waiting for the task: (returnval){ [ 845.721614] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52313c6d-6860-b1a9-4514-80632035ce17" [ 845.721614] env[63345]: _type = "Task" [ 845.721614] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.731940] env[63345]: DEBUG oslo_vmware.api [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52313c6d-6860-b1a9-4514-80632035ce17, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.949674] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5077c400-6ef8-4b26-912d-964466ba2c45 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.568s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 845.949674] env[63345]: INFO nova.compute.manager [None req-5077c400-6ef8-4b26-912d-964466ba2c45 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 11652422-9136-4453-b932-06695f9bc910] Migrating [ 845.956923] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.403s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 845.958457] env[63345]: INFO nova.compute.claims [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 75fc8365-bf8d-489e-935f-a5169c6a7e62] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 845.982815] env[63345]: DEBUG oslo_concurrency.lockutils [req-d6d2f065-29af-409e-83bc-479c77101053 req-fd0ffb0a-5295-4268-89fd-d1d129d21fc0 service nova] Releasing lock "refresh_cache-a3f34e0e-2969-406f-a086-a925549e458e" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 845.982815] env[63345]: DEBUG nova.compute.manager [req-d6d2f065-29af-409e-83bc-479c77101053 req-fd0ffb0a-5295-4268-89fd-d1d129d21fc0 service nova] [instance: 4a59b565-571f-48ef-97bd-bed9853e2d8e] Received event network-vif-deleted-4ea770ff-4619-4df2-b09f-53b1fdc250e5 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 846.176367] env[63345]: DEBUG 
oslo_concurrency.lockutils [None req-dfd9c36c-e449-49a3-9575-a84cd803ef8e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 846.190924] env[63345]: DEBUG oslo_vmware.api [None req-1edba048-317d-4421-a27f-de7e032d598a tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': task-1017242, 'name': ReconfigVM_Task, 'duration_secs': 0.336518} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.191278] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-1edba048-317d-4421-a27f-de7e032d598a tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: dde93fd5-6312-4d91-b041-b7fc84b207d3] Reconfigured VM instance instance-00000048 to attach disk [datastore1] dde93fd5-6312-4d91-b041-b7fc84b207d3/2ff49e1b-8f44-4332-bba9-777d55ff62c4-rescue.vmdk or device None with type thin {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 846.192214] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f13d6f40-9f61-440d-8886-d9155c6668e1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.218521] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2166ce94-ab65-473b-b3a3-17a781c904d5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.238041] env[63345]: DEBUG oslo_vmware.api [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52313c6d-6860-b1a9-4514-80632035ce17, 'name': SearchDatastore_Task, 'duration_secs': 0.010471} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.239444] env[63345]: DEBUG oslo_concurrency.lockutils [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 846.239713] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] a3f34e0e-2969-406f-a086-a925549e458e/a3f34e0e-2969-406f-a086-a925549e458e.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 846.240045] env[63345]: DEBUG oslo_vmware.api [None req-1edba048-317d-4421-a27f-de7e032d598a tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Waiting for the task: (returnval){ [ 846.240045] env[63345]: value = "task-1017243" [ 846.240045] env[63345]: _type = "Task" [ 846.240045] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.240242] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f367ab41-e9b2-4427-85e4-ee3040ce16b7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.250832] env[63345]: DEBUG oslo_vmware.api [None req-1edba048-317d-4421-a27f-de7e032d598a tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': task-1017243, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.252021] env[63345]: DEBUG oslo_vmware.api [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Waiting for the task: (returnval){ [ 846.252021] env[63345]: value = "task-1017244" [ 846.252021] env[63345]: _type = "Task" [ 846.252021] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.260755] env[63345]: DEBUG oslo_vmware.api [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Task: {'id': task-1017244, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.469753] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5077c400-6ef8-4b26-912d-964466ba2c45 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquiring lock "refresh_cache-11652422-9136-4453-b932-06695f9bc910" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 846.470099] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5077c400-6ef8-4b26-912d-964466ba2c45 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquired lock "refresh_cache-11652422-9136-4453-b932-06695f9bc910" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 846.470154] env[63345]: DEBUG nova.network.neutron [None req-5077c400-6ef8-4b26-912d-964466ba2c45 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 11652422-9136-4453-b932-06695f9bc910] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 846.540032] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquiring lock "691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 846.540141] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lock "691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 846.540270] env[63345]: DEBUG nova.compute.manager [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 846.541271] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1592fa5b-b2ac-4d8a-9512-ca8b56767d05 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.548613] env[63345]: DEBUG nova.compute.manager [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63345) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3403}} [ 846.706079] env[63345]: DEBUG oslo_concurrency.lockutils [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Acquiring lock "ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 846.706079] env[63345]: DEBUG oslo_concurrency.lockutils [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Lock "ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 846.756474] env[63345]: DEBUG oslo_vmware.api [None req-1edba048-317d-4421-a27f-de7e032d598a tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': task-1017243, 'name': ReconfigVM_Task, 'duration_secs': 0.167477} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.761560] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-1edba048-317d-4421-a27f-de7e032d598a tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: dde93fd5-6312-4d91-b041-b7fc84b207d3] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 846.762544] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9da93674-4a6e-4e60-8c92-9c8526bc627a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.771304] env[63345]: DEBUG oslo_vmware.api [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Task: {'id': task-1017244, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.773640] env[63345]: DEBUG oslo_vmware.api [None req-1edba048-317d-4421-a27f-de7e032d598a tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Waiting for the task: (returnval){ [ 846.773640] env[63345]: value = "task-1017245" [ 846.773640] env[63345]: _type = "Task" [ 846.773640] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.785675] env[63345]: DEBUG oslo_vmware.api [None req-1edba048-317d-4421-a27f-de7e032d598a tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': task-1017245, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.944338] env[63345]: DEBUG nova.compute.manager [req-4e982bed-d184-4ebf-a1bf-82a8d54c36c7 req-0574f57a-935b-4742-90cf-90bce05265b1 service nova] [instance: 1e349d03-6cae-4322-9941-d48c52c21c0e] Received event network-vif-plugged-1ebdc50b-86da-4bb4-8884-530d087bf7dd {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 846.944338] env[63345]: DEBUG oslo_concurrency.lockutils [req-4e982bed-d184-4ebf-a1bf-82a8d54c36c7 req-0574f57a-935b-4742-90cf-90bce05265b1 service nova] Acquiring lock "1e349d03-6cae-4322-9941-d48c52c21c0e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 846.944338] env[63345]: DEBUG oslo_concurrency.lockutils [req-4e982bed-d184-4ebf-a1bf-82a8d54c36c7 req-0574f57a-935b-4742-90cf-90bce05265b1 service nova] Lock "1e349d03-6cae-4322-9941-d48c52c21c0e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 846.944338] env[63345]: DEBUG oslo_concurrency.lockutils [req-4e982bed-d184-4ebf-a1bf-82a8d54c36c7 req-0574f57a-935b-4742-90cf-90bce05265b1 service nova] Lock "1e349d03-6cae-4322-9941-d48c52c21c0e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 846.944338] env[63345]: DEBUG nova.compute.manager [req-4e982bed-d184-4ebf-a1bf-82a8d54c36c7 req-0574f57a-935b-4742-90cf-90bce05265b1 service nova] [instance: 1e349d03-6cae-4322-9941-d48c52c21c0e] No waiting events found dispatching network-vif-plugged-1ebdc50b-86da-4bb4-8884-530d087bf7dd {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 846.945096] env[63345]: WARNING nova.compute.manager [req-4e982bed-d184-4ebf-a1bf-82a8d54c36c7 req-0574f57a-935b-4742-90cf-90bce05265b1 service nova] [instance: 1e349d03-6cae-4322-9941-d48c52c21c0e] Received unexpected event network-vif-plugged-1ebdc50b-86da-4bb4-8884-530d087bf7dd for instance with vm_state building and task_state spawning. [ 847.056109] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 847.056491] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dd82d240-fff8-49f0-ab6f-e67525df9026 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.065705] env[63345]: DEBUG oslo_vmware.api [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 847.065705] env[63345]: value = "task-1017246" [ 847.065705] env[63345]: _type = "Task" [ 847.065705] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.075063] env[63345]: DEBUG oslo_vmware.api [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017246, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.208315] env[63345]: DEBUG nova.compute.manager [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] [instance: ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 847.275989] env[63345]: DEBUG oslo_vmware.api [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Task: {'id': task-1017244, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.657801} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.279437] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] a3f34e0e-2969-406f-a086-a925549e458e/a3f34e0e-2969-406f-a086-a925549e458e.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 847.279743] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] [instance: a3f34e0e-2969-406f-a086-a925549e458e] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 847.280437] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7b635f93-eb3c-4b62-b3c9-6adae1dc6f98 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.290586] env[63345]: DEBUG oslo_vmware.api [None req-1edba048-317d-4421-a27f-de7e032d598a tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': task-1017245, 'name': PowerOnVM_Task, 'duration_secs': 0.499195} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.292258] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-1edba048-317d-4421-a27f-de7e032d598a tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: dde93fd5-6312-4d91-b041-b7fc84b207d3] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 847.294447] env[63345]: DEBUG oslo_vmware.api [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Waiting for the task: (returnval){ [ 847.294447] env[63345]: value = "task-1017247" [ 847.294447] env[63345]: _type = "Task" [ 847.294447] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.298996] env[63345]: DEBUG nova.compute.manager [None req-1edba048-317d-4421-a27f-de7e032d598a tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: dde93fd5-6312-4d91-b041-b7fc84b207d3] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 847.303849] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfb112f5-f867-4304-9fb0-b277ef38b756 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.313784] env[63345]: DEBUG oslo_vmware.api [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Task: {'id': task-1017247, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.434028] env[63345]: DEBUG nova.network.neutron [None req-5077c400-6ef8-4b26-912d-964466ba2c45 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 11652422-9136-4453-b932-06695f9bc910] Updating instance_info_cache with network_info: [{"id": "71b7616d-5472-4d3c-a8ca-6984d7c70c12", "address": "fa:16:3e:ba:da:55", "network": {"id": "80bb8388-e130-46af-a4fc-1daea51d1bf5", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1343573007-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "964cee117b3c4601b3afe82a8bb9c23e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ddfb706a-add1-4e16-9ac4-d20b16a1df6d", "external-id": "nsx-vlan-transportzone-820", "segmentation_id": 820, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71b7616d-54", "ovs_interfaceid": "71b7616d-5472-4d3c-a8ca-6984d7c70c12", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 847.443882] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7983e129-c9b3-4020-bcd5-668f834319c0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.452503] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49644152-9096-4327-bac4-67fb661dcdca {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.483725] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2c25806-4720-4b7c-a0c8-fda1c65b2ebf {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.491606] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9bf8194-c2c5-4bdb-b770-fbb57daaf31b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.509128] env[63345]: DEBUG nova.compute.provider_tree [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 847.510941] env[63345]: DEBUG nova.network.neutron [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 1e349d03-6cae-4322-9941-d48c52c21c0e] Successfully updated port: 1ebdc50b-86da-4bb4-8884-530d087bf7dd {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 847.551615] env[63345]: 
DEBUG nova.compute.manager [req-bcc77022-8f65-42aa-b7f8-83e4c2b9ecee req-dcdcdc47-30f5-478f-9522-24b001a21c50 service nova] [instance: 1e349d03-6cae-4322-9941-d48c52c21c0e] Received event network-changed-1ebdc50b-86da-4bb4-8884-530d087bf7dd {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 847.551820] env[63345]: DEBUG nova.compute.manager [req-bcc77022-8f65-42aa-b7f8-83e4c2b9ecee req-dcdcdc47-30f5-478f-9522-24b001a21c50 service nova] [instance: 1e349d03-6cae-4322-9941-d48c52c21c0e] Refreshing instance network info cache due to event network-changed-1ebdc50b-86da-4bb4-8884-530d087bf7dd. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 847.552055] env[63345]: DEBUG oslo_concurrency.lockutils [req-bcc77022-8f65-42aa-b7f8-83e4c2b9ecee req-dcdcdc47-30f5-478f-9522-24b001a21c50 service nova] Acquiring lock "refresh_cache-1e349d03-6cae-4322-9941-d48c52c21c0e" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 847.552207] env[63345]: DEBUG oslo_concurrency.lockutils [req-bcc77022-8f65-42aa-b7f8-83e4c2b9ecee req-dcdcdc47-30f5-478f-9522-24b001a21c50 service nova] Acquired lock "refresh_cache-1e349d03-6cae-4322-9941-d48c52c21c0e" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 847.552529] env[63345]: DEBUG nova.network.neutron [req-bcc77022-8f65-42aa-b7f8-83e4c2b9ecee req-dcdcdc47-30f5-478f-9522-24b001a21c50 service nova] [instance: 1e349d03-6cae-4322-9941-d48c52c21c0e] Refreshing network info cache for port 1ebdc50b-86da-4bb4-8884-530d087bf7dd {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 847.574764] env[63345]: DEBUG oslo_vmware.api [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017246, 'name': PowerOffVM_Task, 'duration_secs': 0.233169} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.575117] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 847.575405] env[63345]: DEBUG nova.compute.manager [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 847.576246] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f4fce52-ea20-4987-921f-3df0d4ceef87 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.727634] env[63345]: DEBUG oslo_concurrency.lockutils [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 847.806473] env[63345]: DEBUG oslo_vmware.api [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Task: {'id': task-1017247, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066228} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.806759] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] [instance: a3f34e0e-2969-406f-a086-a925549e458e] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 847.807564] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41366db3-072f-419f-8ce3-5d3651082e98 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.830214] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] [instance: a3f34e0e-2969-406f-a086-a925549e458e] Reconfiguring VM instance instance-0000004a to attach disk [datastore2] a3f34e0e-2969-406f-a086-a925549e458e/a3f34e0e-2969-406f-a086-a925549e458e.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 847.832895] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eae99edb-d38b-471b-ba3f-458c6230f157 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.853740] env[63345]: DEBUG oslo_vmware.api [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Waiting for the task: (returnval){ [ 847.853740] env[63345]: value = "task-1017248" [ 847.853740] env[63345]: _type = "Task" [ 847.853740] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.862364] env[63345]: DEBUG oslo_vmware.api [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Task: {'id': task-1017248, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.936948] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5077c400-6ef8-4b26-912d-964466ba2c45 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Releasing lock "refresh_cache-11652422-9136-4453-b932-06695f9bc910" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 848.013755] env[63345]: DEBUG nova.scheduler.client.report [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 848.018034] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquiring lock "refresh_cache-1e349d03-6cae-4322-9941-d48c52c21c0e" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 848.088864] env[63345]: DEBUG nova.network.neutron [req-bcc77022-8f65-42aa-b7f8-83e4c2b9ecee req-dcdcdc47-30f5-478f-9522-24b001a21c50 service nova] [instance: 1e349d03-6cae-4322-9941-d48c52c21c0e] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 848.091067] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lock "691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.551s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 848.222581] env[63345]: DEBUG nova.network.neutron [req-bcc77022-8f65-42aa-b7f8-83e4c2b9ecee req-dcdcdc47-30f5-478f-9522-24b001a21c50 service nova] [instance: 1e349d03-6cae-4322-9941-d48c52c21c0e] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 848.364527] env[63345]: DEBUG oslo_vmware.api [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Task: {'id': task-1017248, 'name': ReconfigVM_Task, 'duration_secs': 0.286989} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.364830] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] [instance: a3f34e0e-2969-406f-a086-a925549e458e] Reconfigured VM instance instance-0000004a to attach disk [datastore2] a3f34e0e-2969-406f-a086-a925549e458e/a3f34e0e-2969-406f-a086-a925549e458e.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 848.365483] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b6fe0649-a99c-4bf4-b1fe-3da879188499 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.373224] env[63345]: DEBUG oslo_vmware.api [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Waiting for the task: (returnval){ [ 848.373224] env[63345]: value = "task-1017249" [ 848.373224] env[63345]: _type = "Task" [ 848.373224] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.384245] env[63345]: DEBUG oslo_vmware.api [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Task: {'id': task-1017249, 'name': Rename_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.520411] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.563s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 848.520973] env[63345]: DEBUG nova.compute.manager [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 75fc8365-bf8d-489e-935f-a5169c6a7e62] Start building networks asynchronously for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 848.524211] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6b756e41-bd5f-4ed4-85a5-3e14888ede05 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.746s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 848.524466] env[63345]: DEBUG nova.objects.instance [None req-6b756e41-bd5f-4ed4-85a5-3e14888ede05 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Lazy-loading 'resources' on Instance uuid 4f108dcc-c130-4c3f-840d-7a912150db3f {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 848.599898] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 848.726036] env[63345]: DEBUG oslo_concurrency.lockutils [req-bcc77022-8f65-42aa-b7f8-83e4c2b9ecee req-dcdcdc47-30f5-478f-9522-24b001a21c50 service nova] Releasing lock "refresh_cache-1e349d03-6cae-4322-9941-d48c52c21c0e" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 848.726136] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquired lock "refresh_cache-1e349d03-6cae-4322-9941-d48c52c21c0e" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 848.726789] env[63345]: DEBUG nova.network.neutron [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 1e349d03-6cae-4322-9941-d48c52c21c0e] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 848.884344] env[63345]: DEBUG oslo_vmware.api [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Task: {'id': task-1017249, 'name': Rename_Task, 'duration_secs': 0.144038} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.884900] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] [instance: a3f34e0e-2969-406f-a086-a925549e458e] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 848.885314] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2a30b50a-b505-4344-ac3c-af8e334c81d0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.893950] env[63345]: DEBUG oslo_vmware.api [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Waiting for the task: (returnval){ [ 848.893950] env[63345]: value = "task-1017250" [ 848.893950] env[63345]: _type = "Task" [ 848.893950] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.902935] env[63345]: DEBUG oslo_vmware.api [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Task: {'id': task-1017250, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.028045] env[63345]: DEBUG nova.compute.utils [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 849.028045] env[63345]: DEBUG nova.compute.manager [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 75fc8365-bf8d-489e-935f-a5169c6a7e62] Allocating IP information in the background. 
{{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 849.028045] env[63345]: DEBUG nova.network.neutron [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 75fc8365-bf8d-489e-935f-a5169c6a7e62] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 849.103090] env[63345]: DEBUG nova.policy [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f522b76a59a649a0a8570a4e8b8da753', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2b389a73e7804452b23d8c00bedd0362', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 849.251470] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Acquiring lock "a415d4f2-abc7-4553-8442-312316e686b2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 849.252023] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Lock "a415d4f2-abc7-4553-8442-312316e686b2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 849.260942] env[63345]: DEBUG nova.network.neutron [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 1e349d03-6cae-4322-9941-d48c52c21c0e] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 849.412732] env[63345]: DEBUG oslo_vmware.api [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Task: {'id': task-1017250, 'name': PowerOnVM_Task, 'duration_secs': 0.449758} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.413260] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] [instance: a3f34e0e-2969-406f-a086-a925549e458e] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 849.413385] env[63345]: INFO nova.compute.manager [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] [instance: a3f34e0e-2969-406f-a086-a925549e458e] Took 8.59 seconds to spawn the instance on the hypervisor. [ 849.413583] env[63345]: DEBUG nova.compute.manager [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] [instance: a3f34e0e-2969-406f-a086-a925549e458e] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 849.415057] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af74f3c2-45da-40c5-ad10-05c58a1e24d7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.449685] env[63345]: DEBUG nova.network.neutron [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 1e349d03-6cae-4322-9941-d48c52c21c0e] Updating instance_info_cache with network_info: [{"id": "1ebdc50b-86da-4bb4-8884-530d087bf7dd", "address": "fa:16:3e:81:50:ee", "network": {"id": "d7581fd9-99cb-4847-b9da-a659a40e1d52", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1100696493-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c535ae9067ab4e8a87e95c68af4624fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f77ff7a1-209c-4f3f-b2a0-fd817741e739", "external-id": "nsx-vlan-transportzone-935", "segmentation_id": 935, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ebdc50b-86", "ovs_interfaceid": "1ebdc50b-86da-4bb4-8884-530d087bf7dd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 849.458732] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6ae73f4-8d58-4439-9d72-fb16134ad4ca {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.482242] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-5077c400-6ef8-4b26-912d-964466ba2c45 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 11652422-9136-4453-b932-06695f9bc910] Updating instance 
'11652422-9136-4453-b932-06695f9bc910' progress to 0 {{(pid=63345) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 849.488394] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f34cd70-cfea-493e-8303-ebc54c363eb1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.497176] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c57467a-dd36-4f97-a75d-d219ca8702e4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.531809] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b456c9d-22d8-4e9a-ba81-164c7de29df2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.537275] env[63345]: DEBUG nova.compute.manager [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 75fc8365-bf8d-489e-935f-a5169c6a7e62] Start building block device mappings for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 849.547016] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ff767d3-751d-493c-8708-c9c50720605e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.564016] env[63345]: DEBUG nova.compute.provider_tree [None req-6b756e41-bd5f-4ed4-85a5-3e14888ede05 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 849.702644] env[63345]: DEBUG nova.network.neutron [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 75fc8365-bf8d-489e-935f-a5169c6a7e62] Successfully created port: 2bb8e7ea-091a-4a60-9a2b-e9b196790b55 {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 849.756112] env[63345]: DEBUG nova.compute.manager [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: a415d4f2-abc7-4553-8442-312316e686b2] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 849.935372] env[63345]: INFO nova.compute.manager [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] [instance: a3f34e0e-2969-406f-a086-a925549e458e] Took 31.14 seconds to build instance. 
[ 849.956866] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Releasing lock "refresh_cache-1e349d03-6cae-4322-9941-d48c52c21c0e" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 849.957212] env[63345]: DEBUG nova.compute.manager [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 1e349d03-6cae-4322-9941-d48c52c21c0e] Instance network_info: |[{"id": "1ebdc50b-86da-4bb4-8884-530d087bf7dd", "address": "fa:16:3e:81:50:ee", "network": {"id": "d7581fd9-99cb-4847-b9da-a659a40e1d52", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1100696493-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c535ae9067ab4e8a87e95c68af4624fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f77ff7a1-209c-4f3f-b2a0-fd817741e739", "external-id": "nsx-vlan-transportzone-935", "segmentation_id": 935, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ebdc50b-86", "ovs_interfaceid": "1ebdc50b-86da-4bb4-8884-530d087bf7dd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 849.957940] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 1e349d03-6cae-4322-9941-d48c52c21c0e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:81:50:ee', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f77ff7a1-209c-4f3f-b2a0-fd817741e739', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1ebdc50b-86da-4bb4-8884-530d087bf7dd', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 849.965902] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Creating folder: Project (c535ae9067ab4e8a87e95c68af4624fb). Parent ref: group-v225918. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 849.966450] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-434955e1-01ec-4fe5-9f61-aa5bd569a99c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.980029] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Created folder: Project (c535ae9067ab4e8a87e95c68af4624fb) in parent group-v225918. 
[ 849.980283] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Creating folder: Instances. Parent ref: group-v226069. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 849.980562] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cc5c68cf-d509-4d5f-a00f-ba72f3bffced {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.990863] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Created folder: Instances in parent group-v226069. [ 849.991169] env[63345]: DEBUG oslo.service.loopingcall [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 849.991466] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1e349d03-6cae-4322-9941-d48c52c21c0e] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 849.993204] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-5077c400-6ef8-4b26-912d-964466ba2c45 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 11652422-9136-4453-b932-06695f9bc910] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 849.993453] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-479cd2be-aee7-46c1-97ee-eed8c2ba4334 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.008828] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3c8d810c-06d0-4be3-a837-462777b09717 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.018399] env[63345]: DEBUG oslo_vmware.api [None req-5077c400-6ef8-4b26-912d-964466ba2c45 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Waiting for the task: (returnval){ [ 850.018399] env[63345]: value = "task-1017254" [ 850.018399] env[63345]: _type = "Task" [ 850.018399] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.019632] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 850.019632] env[63345]: value = "task-1017253" [ 850.019632] env[63345]: _type = "Task" [ 850.019632] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.032222] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017253, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.065392] env[63345]: DEBUG nova.scheduler.client.report [None req-6b756e41-bd5f-4ed4-85a5-3e14888ede05 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 850.199187] env[63345]: DEBUG oslo_concurrency.lockutils [None req-3e9dc5e8-6ea8-4344-96fb-249ec931b755 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquiring lock "691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 850.199592] env[63345]: DEBUG oslo_concurrency.lockutils [None req-3e9dc5e8-6ea8-4344-96fb-249ec931b755 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lock "691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 850.199843] env[63345]: DEBUG oslo_concurrency.lockutils [None req-3e9dc5e8-6ea8-4344-96fb-249ec931b755 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquiring lock "691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 850.200108] env[63345]: DEBUG oslo_concurrency.lockutils [None req-3e9dc5e8-6ea8-4344-96fb-249ec931b755 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lock "691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 850.200329] env[63345]: DEBUG oslo_concurrency.lockutils [None req-3e9dc5e8-6ea8-4344-96fb-249ec931b755 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lock "691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 850.203734] env[63345]: INFO nova.compute.manager [None req-3e9dc5e8-6ea8-4344-96fb-249ec931b755 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Terminating instance [ 850.281249] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 
tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 850.438704] env[63345]: DEBUG oslo_concurrency.lockutils [None req-db2406f1-c5b2-4c45-b8f0-1949aed52a8c tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Lock "a3f34e0e-2969-406f-a086-a925549e458e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.959s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 850.537256] env[63345]: DEBUG oslo_vmware.api [None req-5077c400-6ef8-4b26-912d-964466ba2c45 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017254, 'name': PowerOffVM_Task, 'duration_secs': 0.225345} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.537523] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017253, 'name': CreateVM_Task, 'duration_secs': 0.426401} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.537935] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-5077c400-6ef8-4b26-912d-964466ba2c45 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 11652422-9136-4453-b932-06695f9bc910] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 850.538146] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-5077c400-6ef8-4b26-912d-964466ba2c45 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 11652422-9136-4453-b932-06695f9bc910] Updating instance '11652422-9136-4453-b932-06695f9bc910' progress to 17 {{(pid=63345) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 850.541423] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1e349d03-6cae-4322-9941-d48c52c21c0e] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 850.542853] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 850.543033] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 850.543365] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquired external semaphore "[datastore2] 
devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 850.543828] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b7ad8ae4-c819-440c-a3cb-1085959c7fcb {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.548137] env[63345]: DEBUG nova.compute.manager [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 75fc8365-bf8d-489e-935f-a5169c6a7e62] Start spawning the instance on the hypervisor. {{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 850.551445] env[63345]: DEBUG oslo_vmware.api [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for the task: (returnval){ [ 850.551445] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52e5a609-bd04-4f66-6cf4-5f825435ffaa" [ 850.551445] env[63345]: _type = "Task" [ 850.551445] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.559757] env[63345]: DEBUG oslo_vmware.api [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52e5a609-bd04-4f66-6cf4-5f825435ffaa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.571663] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6b756e41-bd5f-4ed4-85a5-3e14888ede05 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.047s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 850.573661] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.026s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 850.576019] env[63345]: INFO nova.compute.claims [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 5e20b33c-1481-4bd3-b269-29a70cc3150d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 850.580372] env[63345]: DEBUG nova.virt.hardware [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 850.580592] env[63345]: DEBUG nova.virt.hardware [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 850.580763] env[63345]: DEBUG nova.virt.hardware [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 850.580957] env[63345]: DEBUG nova.virt.hardware [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 850.581196] env[63345]: DEBUG nova.virt.hardware [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 850.581369] env[63345]: DEBUG nova.virt.hardware [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 850.581581] env[63345]: DEBUG nova.virt.hardware [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 850.581867] env[63345]: DEBUG nova.virt.hardware [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 850.581932] env[63345]: DEBUG nova.virt.hardware [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 850.582092] env[63345]: DEBUG nova.virt.hardware [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 850.582275] env[63345]: DEBUG nova.virt.hardware [None 
req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 850.583820] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48696525-63cb-4f7d-a052-9b9a9806c6f1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.592912] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ea68ebe-18df-4349-ad82-15cfd06ee46a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.597025] env[63345]: INFO nova.scheduler.client.report [None req-6b756e41-bd5f-4ed4-85a5-3e14888ede05 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Deleted allocations for instance 4f108dcc-c130-4c3f-840d-7a912150db3f [ 850.615354] env[63345]: DEBUG oslo_concurrency.lockutils [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Acquiring lock "070a834d-6478-4705-8df0-2a27c8780507" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 850.615616] env[63345]: DEBUG oslo_concurrency.lockutils [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Lock "070a834d-6478-4705-8df0-2a27c8780507" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 850.615803] env[63345]: INFO nova.compute.manager [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Shelving [ 850.712025] env[63345]: DEBUG nova.compute.manager [None req-3e9dc5e8-6ea8-4344-96fb-249ec931b755 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Start destroying the instance on the hypervisor. 
{{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 850.712025] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-3e9dc5e8-6ea8-4344-96fb-249ec931b755 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 850.712765] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77b867f9-9aa9-4053-b2ea-4ca54fb934f2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.721598] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-3e9dc5e8-6ea8-4344-96fb-249ec931b755 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 850.721598] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e1f0fc14-40c5-416d-9cbd-85be48b508e9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.816071] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-3e9dc5e8-6ea8-4344-96fb-249ec931b755 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 850.816376] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-3e9dc5e8-6ea8-4344-96fb-249ec931b755 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 850.816608] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-3e9dc5e8-6ea8-4344-96fb-249ec931b755 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Deleting the datastore file [datastore2] 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 850.817178] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-26374341-11f8-4543-9f29-e598051b9894 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.824201] env[63345]: DEBUG oslo_vmware.api [None req-3e9dc5e8-6ea8-4344-96fb-249ec931b755 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 850.824201] env[63345]: value = "task-1017256" [ 850.824201] env[63345]: _type = "Task" [ 850.824201] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.832857] env[63345]: DEBUG oslo_vmware.api [None req-3e9dc5e8-6ea8-4344-96fb-249ec931b755 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017256, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.038763] env[63345]: DEBUG oslo_concurrency.lockutils [None req-592b2fb5-a61e-4a1a-997e-37d0f22b977e tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Acquiring lock "a3f34e0e-2969-406f-a086-a925549e458e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 851.038867] env[63345]: DEBUG oslo_concurrency.lockutils [None req-592b2fb5-a61e-4a1a-997e-37d0f22b977e tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Lock "a3f34e0e-2969-406f-a086-a925549e458e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 851.039240] env[63345]: DEBUG oslo_concurrency.lockutils [None req-592b2fb5-a61e-4a1a-997e-37d0f22b977e tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Acquiring lock "a3f34e0e-2969-406f-a086-a925549e458e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 851.039537] env[63345]: DEBUG oslo_concurrency.lockutils [None req-592b2fb5-a61e-4a1a-997e-37d0f22b977e tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Lock "a3f34e0e-2969-406f-a086-a925549e458e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 851.039875] env[63345]: DEBUG oslo_concurrency.lockutils [None req-592b2fb5-a61e-4a1a-997e-37d0f22b977e tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Lock "a3f34e0e-2969-406f-a086-a925549e458e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 851.042116] env[63345]: INFO nova.compute.manager [None req-592b2fb5-a61e-4a1a-997e-37d0f22b977e tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] [instance: a3f34e0e-2969-406f-a086-a925549e458e] Terminating instance [ 851.045594] env[63345]: DEBUG nova.virt.hardware [None req-5077c400-6ef8-4b26-912d-964466ba2c45 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 851.046092] env[63345]: DEBUG nova.virt.hardware [None req-5077c400-6ef8-4b26-912d-964466ba2c45 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 851.046092] env[63345]: DEBUG nova.virt.hardware [None req-5077c400-6ef8-4b26-912d-964466ba2c45 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 851.046291] env[63345]: DEBUG nova.virt.hardware [None req-5077c400-6ef8-4b26-912d-964466ba2c45 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 851.046557] env[63345]: DEBUG nova.virt.hardware [None req-5077c400-6ef8-4b26-912d-964466ba2c45 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 851.046747] env[63345]: DEBUG nova.virt.hardware [None req-5077c400-6ef8-4b26-912d-964466ba2c45 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 851.046966] env[63345]: DEBUG nova.virt.hardware [None req-5077c400-6ef8-4b26-912d-964466ba2c45 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 851.047303] env[63345]: DEBUG nova.virt.hardware [None req-5077c400-6ef8-4b26-912d-964466ba2c45 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 851.047360] env[63345]: DEBUG nova.virt.hardware [None req-5077c400-6ef8-4b26-912d-964466ba2c45 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 851.047509] env[63345]: DEBUG nova.virt.hardware [None req-5077c400-6ef8-4b26-912d-964466ba2c45 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 851.047737] env[63345]: DEBUG nova.virt.hardware [None req-5077c400-6ef8-4b26-912d-964466ba2c45 tempest-ServerDiskConfigTestJSON-2090373809 
tempest-ServerDiskConfigTestJSON-2090373809-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 851.054058] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0816eaea-80cf-400b-9031-83b203bf0aa7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.065122] env[63345]: DEBUG nova.compute.manager [None req-592b2fb5-a61e-4a1a-997e-37d0f22b977e tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] [instance: a3f34e0e-2969-406f-a086-a925549e458e] Start destroying the instance on the hypervisor. {{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 851.065340] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-592b2fb5-a61e-4a1a-997e-37d0f22b977e tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] [instance: a3f34e0e-2969-406f-a086-a925549e458e] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 851.069276] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9929be9d-2bcc-4ce7-aef2-59a8d4120240 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.080549] env[63345]: DEBUG oslo_vmware.api [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52e5a609-bd04-4f66-6cf4-5f825435ffaa, 'name': SearchDatastore_Task, 'duration_secs': 0.016582} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.081142] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-592b2fb5-a61e-4a1a-997e-37d0f22b977e tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] [instance: a3f34e0e-2969-406f-a086-a925549e458e] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 851.082377] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 851.082616] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 1e349d03-6cae-4322-9941-d48c52c21c0e] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 851.082856] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 851.082999] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 851.083197] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 851.083449] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0f1dc581-13b7-4a64-813e-e4807d350df1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.085081] env[63345]: DEBUG oslo_vmware.api [None req-5077c400-6ef8-4b26-912d-964466ba2c45 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Waiting for the task: (returnval){ [ 851.085081] env[63345]: value = "task-1017257" [ 851.085081] env[63345]: _type = "Task" [ 851.085081] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.085273] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c71e833b-9259-441d-8c18-0f115326d582 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.093988] env[63345]: DEBUG oslo_vmware.api [None req-592b2fb5-a61e-4a1a-997e-37d0f22b977e tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Waiting for the task: (returnval){ [ 851.093988] env[63345]: value = "task-1017258" [ 851.093988] env[63345]: _type = "Task" [ 851.093988] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.101990] env[63345]: DEBUG oslo_vmware.api [None req-592b2fb5-a61e-4a1a-997e-37d0f22b977e tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Task: {'id': task-1017258, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.110670] env[63345]: DEBUG oslo_vmware.api [None req-5077c400-6ef8-4b26-912d-964466ba2c45 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017257, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.110670] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6b756e41-bd5f-4ed4-85a5-3e14888ede05 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Lock "4f108dcc-c130-4c3f-840d-7a912150db3f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.682s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 851.112752] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 851.115394] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 851.115394] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4f46b93a-94cb-44a6-8ec2-51332d3eca9b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.122712] env[63345]: DEBUG oslo_vmware.api [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for the task: (returnval){ [ 851.122712] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52fe79ba-9553-91b8-5352-54e799926b62" [ 851.122712] env[63345]: _type = "Task" [ 851.122712] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.137876] env[63345]: DEBUG oslo_vmware.api [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52fe79ba-9553-91b8-5352-54e799926b62, 'name': SearchDatastore_Task, 'duration_secs': 0.009984} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.138810] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dfc271ca-24ac-4526-8300-911661174c88 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.147704] env[63345]: DEBUG oslo_vmware.api [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for the task: (returnval){ [ 851.147704] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52cea38d-83d7-2334-91e4-6a128606e16a" [ 851.147704] env[63345]: _type = "Task" [ 851.147704] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.154486] env[63345]: DEBUG oslo_vmware.api [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52cea38d-83d7-2334-91e4-6a128606e16a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.333950] env[63345]: DEBUG oslo_vmware.api [None req-3e9dc5e8-6ea8-4344-96fb-249ec931b755 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017256, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.327025} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.334051] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-3e9dc5e8-6ea8-4344-96fb-249ec931b755 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 851.334205] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-3e9dc5e8-6ea8-4344-96fb-249ec931b755 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 851.334394] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-3e9dc5e8-6ea8-4344-96fb-249ec931b755 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 851.334681] env[63345]: INFO nova.compute.manager [None req-3e9dc5e8-6ea8-4344-96fb-249ec931b755 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Took 0.62 seconds to destroy the instance on the hypervisor. [ 851.334908] env[63345]: DEBUG oslo.service.loopingcall [None req-3e9dc5e8-6ea8-4344-96fb-249ec931b755 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 851.335140] env[63345]: DEBUG nova.compute.manager [-] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 851.335251] env[63345]: DEBUG nova.network.neutron [-] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 851.425266] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5a4b2e2b-02a6-47e3-9c0b-e7b4be448d85 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Acquiring lock "6cbe136b-5bf6-4f17-bcef-b712d850615f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 851.425567] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5a4b2e2b-02a6-47e3-9c0b-e7b4be448d85 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Lock "6cbe136b-5bf6-4f17-bcef-b712d850615f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 851.425785] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5a4b2e2b-02a6-47e3-9c0b-e7b4be448d85 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Acquiring lock "6cbe136b-5bf6-4f17-bcef-b712d850615f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 851.425986] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5a4b2e2b-02a6-47e3-9c0b-e7b4be448d85 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Lock "6cbe136b-5bf6-4f17-bcef-b712d850615f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 851.426190] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5a4b2e2b-02a6-47e3-9c0b-e7b4be448d85 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Lock "6cbe136b-5bf6-4f17-bcef-b712d850615f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 851.428355] env[63345]: INFO nova.compute.manager [None req-5a4b2e2b-02a6-47e3-9c0b-e7b4be448d85 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 6cbe136b-5bf6-4f17-bcef-b712d850615f] Terminating instance [ 851.602353] env[63345]: DEBUG oslo_vmware.api [None req-5077c400-6ef8-4b26-912d-964466ba2c45 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017257, 'name': ReconfigVM_Task, 'duration_secs': 0.193024} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.605936] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-5077c400-6ef8-4b26-912d-964466ba2c45 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 11652422-9136-4453-b932-06695f9bc910] Updating instance '11652422-9136-4453-b932-06695f9bc910' progress to 33 {{(pid=63345) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 851.614899] env[63345]: DEBUG oslo_vmware.api [None req-592b2fb5-a61e-4a1a-997e-37d0f22b977e tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Task: {'id': task-1017258, 'name': PowerOffVM_Task, 'duration_secs': 0.193976} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.615512] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-592b2fb5-a61e-4a1a-997e-37d0f22b977e tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] [instance: a3f34e0e-2969-406f-a086-a925549e458e] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 851.615582] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-592b2fb5-a61e-4a1a-997e-37d0f22b977e tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] [instance: a3f34e0e-2969-406f-a086-a925549e458e] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 851.616360] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e3e1bf56-dde5-43cc-937f-ea8134182e87 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.632040] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 851.632240] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-47ca257b-d434-46f1-913a-20dc2c497898 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.639057] env[63345]: DEBUG oslo_vmware.api [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Waiting for the task: (returnval){ [ 851.639057] env[63345]: value = "task-1017260" [ 851.639057] env[63345]: _type = "Task" [ 851.639057] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.647283] env[63345]: DEBUG oslo_vmware.api [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1017260, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.667201] env[63345]: DEBUG oslo_vmware.api [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52cea38d-83d7-2334-91e4-6a128606e16a, 'name': SearchDatastore_Task, 'duration_secs': 0.010188} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.667201] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 851.667419] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 1e349d03-6cae-4322-9941-d48c52c21c0e/1e349d03-6cae-4322-9941-d48c52c21c0e.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 851.667698] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c9c2c0e7-7caf-4fad-9dd8-9f86a0776aac {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.678826] env[63345]: DEBUG oslo_vmware.api [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for the task: (returnval){ [ 851.678826] env[63345]: value = "task-1017261" [ 851.678826] env[63345]: _type = "Task" [ 851.678826] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.694871] env[63345]: DEBUG oslo_vmware.api [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017261, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.703747] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-592b2fb5-a61e-4a1a-997e-37d0f22b977e tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] [instance: a3f34e0e-2969-406f-a086-a925549e458e] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 851.704063] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-592b2fb5-a61e-4a1a-997e-37d0f22b977e tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] [instance: a3f34e0e-2969-406f-a086-a925549e458e] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 851.704305] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-592b2fb5-a61e-4a1a-997e-37d0f22b977e tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Deleting the datastore file [datastore2] a3f34e0e-2969-406f-a086-a925549e458e {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 851.707374] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e6d62f1c-edbb-4b8f-b883-77521684336b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.714092] env[63345]: DEBUG oslo_vmware.api [None req-592b2fb5-a61e-4a1a-997e-37d0f22b977e tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Waiting for the task: (returnval){ [ 851.714092] env[63345]: value = "task-1017262" [ 851.714092] env[63345]: _type = "Task" [ 851.714092] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.723682] env[63345]: DEBUG oslo_vmware.api [None req-592b2fb5-a61e-4a1a-997e-37d0f22b977e tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Task: {'id': task-1017262, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.933264] env[63345]: DEBUG nova.compute.manager [None req-5a4b2e2b-02a6-47e3-9c0b-e7b4be448d85 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 6cbe136b-5bf6-4f17-bcef-b712d850615f] Start destroying the instance on the hypervisor. 
{{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 851.933536] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-5a4b2e2b-02a6-47e3-9c0b-e7b4be448d85 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 6cbe136b-5bf6-4f17-bcef-b712d850615f] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 851.934847] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b74afbc-b043-467f-a265-e526c10e4bf1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.943739] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a4b2e2b-02a6-47e3-9c0b-e7b4be448d85 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 6cbe136b-5bf6-4f17-bcef-b712d850615f] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 851.944035] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4ad65de0-1d54-447b-9f72-6710399bbe20 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.954663] env[63345]: DEBUG oslo_vmware.api [None req-5a4b2e2b-02a6-47e3-9c0b-e7b4be448d85 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Waiting for the task: (returnval){ [ 851.954663] env[63345]: value = "task-1017263" [ 851.954663] env[63345]: _type = "Task" [ 851.954663] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.968269] env[63345]: DEBUG oslo_vmware.api [None req-5a4b2e2b-02a6-47e3-9c0b-e7b4be448d85 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1017263, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.045265] env[63345]: DEBUG nova.compute.manager [req-22b8f70b-ad05-47d0-a52b-dec94f90afa6 req-dcd467a6-7342-48fc-838d-75c0172d33fc service nova] [instance: 75fc8365-bf8d-489e-935f-a5169c6a7e62] Received event network-vif-plugged-2bb8e7ea-091a-4a60-9a2b-e9b196790b55 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 852.045494] env[63345]: DEBUG oslo_concurrency.lockutils [req-22b8f70b-ad05-47d0-a52b-dec94f90afa6 req-dcd467a6-7342-48fc-838d-75c0172d33fc service nova] Acquiring lock "75fc8365-bf8d-489e-935f-a5169c6a7e62-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 852.045703] env[63345]: DEBUG oslo_concurrency.lockutils [req-22b8f70b-ad05-47d0-a52b-dec94f90afa6 req-dcd467a6-7342-48fc-838d-75c0172d33fc service nova] Lock "75fc8365-bf8d-489e-935f-a5169c6a7e62-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 852.045874] env[63345]: DEBUG oslo_concurrency.lockutils [req-22b8f70b-ad05-47d0-a52b-dec94f90afa6 req-dcd467a6-7342-48fc-838d-75c0172d33fc service nova] Lock "75fc8365-bf8d-489e-935f-a5169c6a7e62-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 852.046055] env[63345]: DEBUG nova.compute.manager [req-22b8f70b-ad05-47d0-a52b-dec94f90afa6 req-dcd467a6-7342-48fc-838d-75c0172d33fc service nova] [instance: 75fc8365-bf8d-489e-935f-a5169c6a7e62] No waiting events found dispatching network-vif-plugged-2bb8e7ea-091a-4a60-9a2b-e9b196790b55 {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 852.046232] env[63345]: WARNING nova.compute.manager [req-22b8f70b-ad05-47d0-a52b-dec94f90afa6 req-dcd467a6-7342-48fc-838d-75c0172d33fc service nova] [instance: 75fc8365-bf8d-489e-935f-a5169c6a7e62] Received unexpected event network-vif-plugged-2bb8e7ea-091a-4a60-9a2b-e9b196790b55 for instance with vm_state building and task_state spawning. 
[ 852.054519] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d71526e1-5b4f-4058-8a53-c8df963547e6 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.063558] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bfa851e-9aa3-4d43-a85e-3428352b552c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.069154] env[63345]: DEBUG nova.compute.manager [req-492d6d86-6550-4130-850a-175a70d77ab9 req-f54c12d0-4dba-4ce4-96b6-d64ff6f12eda service nova] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Received event network-vif-deleted-07017fee-f295-4317-9453-e41726d715c5 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 852.069341] env[63345]: INFO nova.compute.manager [req-492d6d86-6550-4130-850a-175a70d77ab9 req-f54c12d0-4dba-4ce4-96b6-d64ff6f12eda service nova] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Neutron deleted interface 07017fee-f295-4317-9453-e41726d715c5; detaching it from the instance and deleting it from the info cache [ 852.069567] env[63345]: DEBUG nova.network.neutron [req-492d6d86-6550-4130-850a-175a70d77ab9 req-f54c12d0-4dba-4ce4-96b6-d64ff6f12eda service nova] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 852.102603] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ef3696a-fb4a-463d-94b0-e7cc49be922e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.105863] env[63345]: DEBUG nova.network.neutron [-] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 852.113098] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2965f84-8b96-4657-aecb-a84d13ff176b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.119396] env[63345]: DEBUG nova.virt.hardware [None req-5077c400-6ef8-4b26-912d-964466ba2c45 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 852.119669] env[63345]: DEBUG nova.virt.hardware [None req-5077c400-6ef8-4b26-912d-964466ba2c45 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Flavor limits 0:0:0 {{(pid=63345) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 852.119807] env[63345]: DEBUG nova.virt.hardware [None req-5077c400-6ef8-4b26-912d-964466ba2c45 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 852.120226] env[63345]: DEBUG nova.virt.hardware [None req-5077c400-6ef8-4b26-912d-964466ba2c45 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 852.120226] env[63345]: DEBUG nova.virt.hardware [None req-5077c400-6ef8-4b26-912d-964466ba2c45 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 852.120365] env[63345]: DEBUG nova.virt.hardware [None req-5077c400-6ef8-4b26-912d-964466ba2c45 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 852.120518] env[63345]: DEBUG nova.virt.hardware [None req-5077c400-6ef8-4b26-912d-964466ba2c45 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 852.120684] env[63345]: DEBUG nova.virt.hardware [None req-5077c400-6ef8-4b26-912d-964466ba2c45 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 852.120858] env[63345]: DEBUG nova.virt.hardware [None req-5077c400-6ef8-4b26-912d-964466ba2c45 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 852.121101] env[63345]: DEBUG nova.virt.hardware [None req-5077c400-6ef8-4b26-912d-964466ba2c45 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 852.121285] env[63345]: DEBUG nova.virt.hardware [None req-5077c400-6ef8-4b26-912d-964466ba2c45 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 852.127623] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-5077c400-6ef8-4b26-912d-964466ba2c45 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 11652422-9136-4453-b932-06695f9bc910] Reconfiguring VM instance instance-00000041 to detach disk 2000 {{(pid=63345) detach_disk_from_vm 
/opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 852.128641] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-055e0542-eea5-4d10-83c0-23e0e368f136 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.155409] env[63345]: DEBUG nova.compute.provider_tree [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 852.161024] env[63345]: DEBUG oslo_vmware.api [None req-5077c400-6ef8-4b26-912d-964466ba2c45 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Waiting for the task: (returnval){ [ 852.161024] env[63345]: value = "task-1017264" [ 852.161024] env[63345]: _type = "Task" [ 852.161024] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.165504] env[63345]: DEBUG oslo_vmware.api [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1017260, 'name': PowerOffVM_Task, 'duration_secs': 0.304646} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.166823] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 852.167655] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8c7421c-c66a-4291-b893-5e183653fc16 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.173993] env[63345]: DEBUG oslo_vmware.api [None req-5077c400-6ef8-4b26-912d-964466ba2c45 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017264, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.193538] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f4479c9-a9c8-4a77-9431-858cf03e48fb {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.203709] env[63345]: DEBUG oslo_vmware.api [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017261, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.513219} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.205602] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 1e349d03-6cae-4322-9941-d48c52c21c0e/1e349d03-6cae-4322-9941-d48c52c21c0e.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 852.205833] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 1e349d03-6cae-4322-9941-d48c52c21c0e] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 852.208272] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bcb129fe-19eb-4f46-a783-18afb0d98d86 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.215430] env[63345]: DEBUG oslo_vmware.api [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for the task: (returnval){ [ 852.215430] env[63345]: value = "task-1017265" [ 852.215430] env[63345]: _type = "Task" [ 852.215430] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.222324] env[63345]: DEBUG nova.network.neutron [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 75fc8365-bf8d-489e-935f-a5169c6a7e62] Successfully updated port: 2bb8e7ea-091a-4a60-9a2b-e9b196790b55 {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 852.229250] env[63345]: DEBUG oslo_vmware.api [None req-592b2fb5-a61e-4a1a-997e-37d0f22b977e tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Task: {'id': task-1017262, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.281569} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.233094] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-592b2fb5-a61e-4a1a-997e-37d0f22b977e tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 852.233094] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-592b2fb5-a61e-4a1a-997e-37d0f22b977e tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] [instance: a3f34e0e-2969-406f-a086-a925549e458e] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 852.233094] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-592b2fb5-a61e-4a1a-997e-37d0f22b977e tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] [instance: a3f34e0e-2969-406f-a086-a925549e458e] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 852.233282] env[63345]: INFO nova.compute.manager [None req-592b2fb5-a61e-4a1a-997e-37d0f22b977e tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] [instance: a3f34e0e-2969-406f-a086-a925549e458e] Took 1.17 seconds to destroy the instance on the hypervisor. [ 852.233511] env[63345]: DEBUG oslo.service.loopingcall [None req-592b2fb5-a61e-4a1a-997e-37d0f22b977e tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 852.233807] env[63345]: DEBUG oslo_vmware.api [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017265, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.234066] env[63345]: DEBUG nova.compute.manager [-] [instance: a3f34e0e-2969-406f-a086-a925549e458e] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 852.234157] env[63345]: DEBUG nova.network.neutron [-] [instance: a3f34e0e-2969-406f-a086-a925549e458e] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 852.362892] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Acquiring lock "0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 852.363294] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Lock "0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 852.465453] env[63345]: DEBUG oslo_vmware.api [None req-5a4b2e2b-02a6-47e3-9c0b-e7b4be448d85 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1017263, 'name': PowerOffVM_Task, 'duration_secs': 0.33437} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.465736] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a4b2e2b-02a6-47e3-9c0b-e7b4be448d85 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 6cbe136b-5bf6-4f17-bcef-b712d850615f] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 852.465912] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-5a4b2e2b-02a6-47e3-9c0b-e7b4be448d85 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 6cbe136b-5bf6-4f17-bcef-b712d850615f] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 852.466186] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-73c53254-41e6-4e96-a100-e4b4c534e26a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.544436] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-5a4b2e2b-02a6-47e3-9c0b-e7b4be448d85 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 6cbe136b-5bf6-4f17-bcef-b712d850615f] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 852.544686] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-5a4b2e2b-02a6-47e3-9c0b-e7b4be448d85 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 6cbe136b-5bf6-4f17-bcef-b712d850615f] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 852.544882] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-5a4b2e2b-02a6-47e3-9c0b-e7b4be448d85 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Deleting the datastore file [datastore2] 6cbe136b-5bf6-4f17-bcef-b712d850615f {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 852.546931] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-32c8f878-fa65-41ff-87f7-4c240813cae1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.553547] env[63345]: DEBUG oslo_vmware.api [None req-5a4b2e2b-02a6-47e3-9c0b-e7b4be448d85 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Waiting for the task: (returnval){ [ 852.553547] env[63345]: value = "task-1017267" [ 852.553547] env[63345]: _type = "Task" [ 852.553547] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.562402] env[63345]: DEBUG oslo_vmware.api [None req-5a4b2e2b-02a6-47e3-9c0b-e7b4be448d85 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1017267, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.564714] env[63345]: DEBUG nova.compute.manager [req-4a7668d1-23fc-4b8e-b67e-37a4079f02e0 req-87b5e098-bc08-40c5-9297-9088522a1ce0 service nova] [instance: 75fc8365-bf8d-489e-935f-a5169c6a7e62] Received event network-changed-2bb8e7ea-091a-4a60-9a2b-e9b196790b55 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 852.564996] env[63345]: DEBUG nova.compute.manager [req-4a7668d1-23fc-4b8e-b67e-37a4079f02e0 req-87b5e098-bc08-40c5-9297-9088522a1ce0 service nova] [instance: 75fc8365-bf8d-489e-935f-a5169c6a7e62] Refreshing instance network info cache due to event network-changed-2bb8e7ea-091a-4a60-9a2b-e9b196790b55. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 852.565193] env[63345]: DEBUG oslo_concurrency.lockutils [req-4a7668d1-23fc-4b8e-b67e-37a4079f02e0 req-87b5e098-bc08-40c5-9297-9088522a1ce0 service nova] Acquiring lock "refresh_cache-75fc8365-bf8d-489e-935f-a5169c6a7e62" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 852.565330] env[63345]: DEBUG oslo_concurrency.lockutils [req-4a7668d1-23fc-4b8e-b67e-37a4079f02e0 req-87b5e098-bc08-40c5-9297-9088522a1ce0 service nova] Acquired lock "refresh_cache-75fc8365-bf8d-489e-935f-a5169c6a7e62" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 852.565520] env[63345]: DEBUG nova.network.neutron [req-4a7668d1-23fc-4b8e-b67e-37a4079f02e0 req-87b5e098-bc08-40c5-9297-9088522a1ce0 service nova] [instance: 75fc8365-bf8d-489e-935f-a5169c6a7e62] Refreshing network info cache for port 2bb8e7ea-091a-4a60-9a2b-e9b196790b55 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 852.574497] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-028ebfbd-06e5-4dbb-b436-fdf3f8520005 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.585406] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ed2f4a1-d2ec-4134-bd32-6bb0b6a634b1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.608685] env[63345]: INFO nova.compute.manager [-] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Took 1.27 seconds to deallocate network for instance. [ 852.620149] env[63345]: DEBUG nova.compute.manager [req-492d6d86-6550-4130-850a-175a70d77ab9 req-f54c12d0-4dba-4ce4-96b6-d64ff6f12eda service nova] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Detach interface failed, port_id=07017fee-f295-4317-9453-e41726d715c5, reason: Instance 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f could not be found. {{(pid=63345) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 852.660265] env[63345]: DEBUG nova.scheduler.client.report [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 852.676272] env[63345]: DEBUG oslo_vmware.api [None req-5077c400-6ef8-4b26-912d-964466ba2c45 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017264, 'name': ReconfigVM_Task, 'duration_secs': 0.21865} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.677134] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-5077c400-6ef8-4b26-912d-964466ba2c45 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 11652422-9136-4453-b932-06695f9bc910] Reconfigured VM instance instance-00000041 to detach disk 2000 {{(pid=63345) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 852.677983] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f9387fe-dc70-4fdf-bf72-e6d19a720037 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.701488] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-5077c400-6ef8-4b26-912d-964466ba2c45 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 11652422-9136-4453-b932-06695f9bc910] Reconfiguring VM instance instance-00000041 to attach disk [datastore2] 11652422-9136-4453-b932-06695f9bc910/11652422-9136-4453-b932-06695f9bc910.vmdk or device None with type thin {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 852.702273] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8bdc4b7b-69cb-4a8a-8a92-5d37e2ffa82a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.716882] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Creating Snapshot of the VM instance {{(pid=63345) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 852.717287] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-fe3e7ec3-3120-4d9c-9ce6-77d8316d9be0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.725539] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Acquiring lock "refresh_cache-75fc8365-bf8d-489e-935f-a5169c6a7e62" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 852.725917] env[63345]: DEBUG oslo_vmware.api [None req-5077c400-6ef8-4b26-912d-964466ba2c45 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Waiting for the task: (returnval){ [ 852.725917] env[63345]: value = "task-1017268" [ 852.725917] env[63345]: _type = "Task" [ 852.725917] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.729245] env[63345]: DEBUG oslo_vmware.api [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017265, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.093138} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.733036] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 1e349d03-6cae-4322-9941-d48c52c21c0e] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 852.733407] env[63345]: DEBUG oslo_vmware.api [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Waiting for the task: (returnval){ [ 852.733407] env[63345]: value = "task-1017269" [ 852.733407] env[63345]: _type = "Task" [ 852.733407] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.734380] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cffaa577-0d0e-4f24-89e8-2fb9b694cc03 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.744820] env[63345]: DEBUG oslo_vmware.api [None req-5077c400-6ef8-4b26-912d-964466ba2c45 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017268, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.765893] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 1e349d03-6cae-4322-9941-d48c52c21c0e] Reconfiguring VM instance instance-0000004b to attach disk [datastore2] 1e349d03-6cae-4322-9941-d48c52c21c0e/1e349d03-6cae-4322-9941-d48c52c21c0e.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 852.766286] env[63345]: DEBUG oslo_vmware.api [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1017269, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.766857] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-50d506f6-856e-4ca9-9b71-551c5e36bb4e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.787117] env[63345]: DEBUG oslo_vmware.api [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for the task: (returnval){ [ 852.787117] env[63345]: value = "task-1017270" [ 852.787117] env[63345]: _type = "Task" [ 852.787117] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.795246] env[63345]: DEBUG oslo_vmware.api [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017270, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.865800] env[63345]: DEBUG nova.compute.manager [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 853.064514] env[63345]: DEBUG oslo_vmware.api [None req-5a4b2e2b-02a6-47e3-9c0b-e7b4be448d85 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1017267, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.18735} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.064514] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a4b2e2b-02a6-47e3-9c0b-e7b4be448d85 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 853.064717] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-5a4b2e2b-02a6-47e3-9c0b-e7b4be448d85 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 6cbe136b-5bf6-4f17-bcef-b712d850615f] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 853.064889] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-5a4b2e2b-02a6-47e3-9c0b-e7b4be448d85 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 6cbe136b-5bf6-4f17-bcef-b712d850615f] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 853.065091] env[63345]: INFO nova.compute.manager [None req-5a4b2e2b-02a6-47e3-9c0b-e7b4be448d85 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 6cbe136b-5bf6-4f17-bcef-b712d850615f] Took 1.13 seconds to destroy the instance on the hypervisor. [ 853.065343] env[63345]: DEBUG oslo.service.loopingcall [None req-5a4b2e2b-02a6-47e3-9c0b-e7b4be448d85 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 853.065584] env[63345]: DEBUG nova.compute.manager [-] [instance: 6cbe136b-5bf6-4f17-bcef-b712d850615f] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 853.065732] env[63345]: DEBUG nova.network.neutron [-] [instance: 6cbe136b-5bf6-4f17-bcef-b712d850615f] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 853.098450] env[63345]: DEBUG nova.network.neutron [req-4a7668d1-23fc-4b8e-b67e-37a4079f02e0 req-87b5e098-bc08-40c5-9297-9088522a1ce0 service nova] [instance: 75fc8365-bf8d-489e-935f-a5169c6a7e62] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 853.124910] env[63345]: DEBUG nova.network.neutron [-] [instance: a3f34e0e-2969-406f-a086-a925549e458e] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 853.127126] env[63345]: DEBUG oslo_concurrency.lockutils [None req-3e9dc5e8-6ea8-4344-96fb-249ec931b755 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 853.167582] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.594s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 853.168219] env[63345]: DEBUG nova.compute.manager [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 5e20b33c-1481-4bd3-b269-29a70cc3150d] Start building networks asynchronously for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 853.172124] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4b6e5e86-a53e-4396-8dd3-5765b17272d1 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.508s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 853.172124] env[63345]: DEBUG nova.objects.instance [None req-4b6e5e86-a53e-4396-8dd3-5765b17272d1 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Lazy-loading 'resources' on Instance uuid 37f269fe-0266-4c03-9641-e6f43072657a {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 853.194158] env[63345]: DEBUG nova.network.neutron [req-4a7668d1-23fc-4b8e-b67e-37a4079f02e0 req-87b5e098-bc08-40c5-9297-9088522a1ce0 service nova] [instance: 75fc8365-bf8d-489e-935f-a5169c6a7e62] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 853.239614] env[63345]: DEBUG oslo_vmware.api [None req-5077c400-6ef8-4b26-912d-964466ba2c45 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017268, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.249152] env[63345]: DEBUG oslo_vmware.api [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1017269, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.298683] env[63345]: DEBUG oslo_vmware.api [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017270, 'name': ReconfigVM_Task, 'duration_secs': 0.317972} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.299104] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 1e349d03-6cae-4322-9941-d48c52c21c0e] Reconfigured VM instance instance-0000004b to attach disk [datastore2] 1e349d03-6cae-4322-9941-d48c52c21c0e/1e349d03-6cae-4322-9941-d48c52c21c0e.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 853.299801] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f4daf509-c884-44b3-99f2-6d03f0820963 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.314220] env[63345]: DEBUG oslo_vmware.api [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for the task: (returnval){ [ 853.314220] env[63345]: value = "task-1017271" [ 853.314220] env[63345]: _type = "Task" [ 853.314220] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.328220] env[63345]: DEBUG oslo_vmware.api [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017271, 'name': Rename_Task} progress is 10%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.393975] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 853.501806] env[63345]: DEBUG nova.objects.instance [None req-6f7ba1b9-7c29-4db7-91cf-25c84ef40b76 tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Lazy-loading 'flavor' on Instance uuid 805f9143-a8d8-4995-a20d-3b10ef3ab599 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 853.628568] env[63345]: INFO nova.compute.manager [-] [instance: a3f34e0e-2969-406f-a086-a925549e458e] Took 1.39 seconds to deallocate network for instance. 
[ 853.678650] env[63345]: DEBUG nova.compute.utils [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 853.685040] env[63345]: DEBUG nova.compute.manager [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 5e20b33c-1481-4bd3-b269-29a70cc3150d] Allocating IP information in the background. {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 853.685040] env[63345]: DEBUG nova.network.neutron [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 5e20b33c-1481-4bd3-b269-29a70cc3150d] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 853.696450] env[63345]: DEBUG oslo_concurrency.lockutils [req-4a7668d1-23fc-4b8e-b67e-37a4079f02e0 req-87b5e098-bc08-40c5-9297-9088522a1ce0 service nova] Releasing lock "refresh_cache-75fc8365-bf8d-489e-935f-a5169c6a7e62" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 853.696846] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Acquired lock "refresh_cache-75fc8365-bf8d-489e-935f-a5169c6a7e62" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 853.697054] env[63345]: DEBUG nova.network.neutron [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 75fc8365-bf8d-489e-935f-a5169c6a7e62] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 853.743291] env[63345]: DEBUG oslo_vmware.api [None req-5077c400-6ef8-4b26-912d-964466ba2c45 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017268, 'name': ReconfigVM_Task, 'duration_secs': 0.839094} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.749229] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-5077c400-6ef8-4b26-912d-964466ba2c45 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 11652422-9136-4453-b932-06695f9bc910] Reconfigured VM instance instance-00000041 to attach disk [datastore2] 11652422-9136-4453-b932-06695f9bc910/11652422-9136-4453-b932-06695f9bc910.vmdk or device None with type thin {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 853.749599] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-5077c400-6ef8-4b26-912d-964466ba2c45 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 11652422-9136-4453-b932-06695f9bc910] Updating instance '11652422-9136-4453-b932-06695f9bc910' progress to 50 {{(pid=63345) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 853.760198] env[63345]: DEBUG oslo_vmware.api [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1017269, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.761223] env[63345]: DEBUG nova.policy [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '738e7097762c42d490a66c3d86af9635', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '41afa63287424a549133615eb390bac7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 853.828143] env[63345]: DEBUG oslo_vmware.api [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017271, 'name': Rename_Task, 'duration_secs': 0.149636} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.832018] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 1e349d03-6cae-4322-9941-d48c52c21c0e] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 853.832018] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-52a4a1e6-e718-478d-a77d-2f7136c62764 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.839774] env[63345]: DEBUG oslo_vmware.api [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for the task: (returnval){ [ 853.839774] env[63345]: value = "task-1017272" [ 853.839774] env[63345]: _type = "Task" [ 853.839774] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.854589] env[63345]: DEBUG oslo_vmware.api [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017272, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.009501] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6f7ba1b9-7c29-4db7-91cf-25c84ef40b76 tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Acquiring lock "refresh_cache-805f9143-a8d8-4995-a20d-3b10ef3ab599" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 854.009822] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6f7ba1b9-7c29-4db7-91cf-25c84ef40b76 tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Acquired lock "refresh_cache-805f9143-a8d8-4995-a20d-3b10ef3ab599" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 854.060050] env[63345]: DEBUG nova.network.neutron [-] [instance: 6cbe136b-5bf6-4f17-bcef-b712d850615f] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 854.122225] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cab751c-6872-40d5-a477-b3e10553d8d3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.133436] env[63345]: DEBUG nova.network.neutron [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 5e20b33c-1481-4bd3-b269-29a70cc3150d] Successfully created port: 77f9ccf5-22c3-4c4e-8a43-15543133f465 {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 854.136595] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54cefc7f-547e-460f-8199-6d2ef52ef01f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.141942] env[63345]: DEBUG oslo_concurrency.lockutils [None req-592b2fb5-a61e-4a1a-997e-37d0f22b977e tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 854.176204] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf7099cc-c269-4c82-8f32-f307a4ea1bc0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.184519] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebd6e612-8be5-4d6a-ac06-317e4c9be14c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.189055] env[63345]: DEBUG nova.compute.manager [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 
tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 5e20b33c-1481-4bd3-b269-29a70cc3150d] Start building block device mappings for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 854.204409] env[63345]: DEBUG nova.compute.provider_tree [None req-4b6e5e86-a53e-4396-8dd3-5765b17272d1 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 854.249549] env[63345]: DEBUG nova.network.neutron [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 75fc8365-bf8d-489e-935f-a5169c6a7e62] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 854.257122] env[63345]: DEBUG oslo_vmware.api [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1017269, 'name': CreateSnapshot_Task, 'duration_secs': 1.428691} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.257802] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b87c9f8a-b76f-4ec9-84b0-d73d3c54f332 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.260511] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Created Snapshot of the VM instance {{(pid=63345) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 854.262882] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f93d0f6a-a300-4916-af2b-30e1bbeaf5df {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.293892] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-071a7fbe-5ce4-43b8-ad0c-9d2d238efdbe {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.322015] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-5077c400-6ef8-4b26-912d-964466ba2c45 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 11652422-9136-4453-b932-06695f9bc910] Updating instance '11652422-9136-4453-b932-06695f9bc910' progress to 67 {{(pid=63345) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 854.356538] env[63345]: DEBUG oslo_vmware.api [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017272, 'name': PowerOnVM_Task, 'duration_secs': 0.461354} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.356538] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 1e349d03-6cae-4322-9941-d48c52c21c0e] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 854.356538] env[63345]: INFO nova.compute.manager [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 1e349d03-6cae-4322-9941-d48c52c21c0e] Took 8.94 seconds to spawn the instance on the hypervisor. [ 854.356538] env[63345]: DEBUG nova.compute.manager [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 1e349d03-6cae-4322-9941-d48c52c21c0e] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 854.356538] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7ecea66-02b1-4ab0-b585-d0be07bd2ad9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.550025] env[63345]: DEBUG nova.network.neutron [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 75fc8365-bf8d-489e-935f-a5169c6a7e62] Updating instance_info_cache with network_info: [{"id": "2bb8e7ea-091a-4a60-9a2b-e9b196790b55", "address": "fa:16:3e:25:6e:9d", "network": {"id": "ce89b46a-97ec-4f2d-be39-333e9fcf307d", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-416012078-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2b389a73e7804452b23d8c00bedd0362", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bd3c6b64-aba2-4bdc-a693-3b4dff3ed861", "external-id": "nsx-vlan-transportzone-600", "segmentation_id": 600, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2bb8e7ea-09", "ovs_interfaceid": "2bb8e7ea-091a-4a60-9a2b-e9b196790b55", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 854.562124] env[63345]: INFO nova.compute.manager [-] [instance: 6cbe136b-5bf6-4f17-bcef-b712d850615f] Took 1.50 seconds to deallocate network for instance. 
[ 854.607106] env[63345]: DEBUG nova.network.neutron [None req-6f7ba1b9-7c29-4db7-91cf-25c84ef40b76 tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 854.710540] env[63345]: DEBUG nova.scheduler.client.report [None req-4b6e5e86-a53e-4396-8dd3-5765b17272d1 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 854.719188] env[63345]: DEBUG nova.compute.manager [req-24a20fc1-2fd2-474f-9d08-8812d3e5da56 req-68972096-8622-4b08-944d-f5786bd90fe4 service nova] [instance: a3f34e0e-2969-406f-a086-a925549e458e] Received event network-vif-deleted-1fed049a-d415-4db7-a8c2-d32664f0324b {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 854.719421] env[63345]: DEBUG nova.compute.manager [req-24a20fc1-2fd2-474f-9d08-8812d3e5da56 req-68972096-8622-4b08-944d-f5786bd90fe4 service nova] [instance: 6cbe136b-5bf6-4f17-bcef-b712d850615f] Received event network-vif-deleted-be445772-8a21-4213-b9d3-8852ba3c12ef {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 854.797806] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Creating linked-clone VM from snapshot {{(pid=63345) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 854.798210] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-72e6e022-f4dd-48bd-bcd8-f8f29b501bbc {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.810446] env[63345]: DEBUG oslo_vmware.api [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Waiting for the task: (returnval){ [ 854.810446] env[63345]: value = "task-1017273" [ 854.810446] env[63345]: _type = "Task" [ 854.810446] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.816996] env[63345]: DEBUG nova.compute.manager [req-de7758b5-ca91-443e-8da9-98424c78e941 req-c3409cf4-b6df-4d24-bd5a-58bab796a6f8 service nova] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] Received event network-changed-0ae421d2-83f3-4520-8a37-01cb6a91a3f5 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 854.817473] env[63345]: DEBUG nova.compute.manager [req-de7758b5-ca91-443e-8da9-98424c78e941 req-c3409cf4-b6df-4d24-bd5a-58bab796a6f8 service nova] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] Refreshing instance network info cache due to event network-changed-0ae421d2-83f3-4520-8a37-01cb6a91a3f5. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 854.817732] env[63345]: DEBUG oslo_concurrency.lockutils [req-de7758b5-ca91-443e-8da9-98424c78e941 req-c3409cf4-b6df-4d24-bd5a-58bab796a6f8 service nova] Acquiring lock "refresh_cache-805f9143-a8d8-4995-a20d-3b10ef3ab599" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 854.825545] env[63345]: DEBUG oslo_vmware.api [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1017273, 'name': CloneVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.871010] env[63345]: DEBUG nova.network.neutron [None req-5077c400-6ef8-4b26-912d-964466ba2c45 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 11652422-9136-4453-b932-06695f9bc910] Port 71b7616d-5472-4d3c-a8ca-6984d7c70c12 binding to destination host cpu-1 is already ACTIVE {{(pid=63345) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3171}} [ 854.875235] env[63345]: INFO nova.compute.manager [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 1e349d03-6cae-4322-9941-d48c52c21c0e] Took 32.47 seconds to build instance. 
[ 855.053428] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Releasing lock "refresh_cache-75fc8365-bf8d-489e-935f-a5169c6a7e62" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 855.053820] env[63345]: DEBUG nova.compute.manager [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 75fc8365-bf8d-489e-935f-a5169c6a7e62] Instance network_info: |[{"id": "2bb8e7ea-091a-4a60-9a2b-e9b196790b55", "address": "fa:16:3e:25:6e:9d", "network": {"id": "ce89b46a-97ec-4f2d-be39-333e9fcf307d", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-416012078-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2b389a73e7804452b23d8c00bedd0362", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bd3c6b64-aba2-4bdc-a693-3b4dff3ed861", "external-id": "nsx-vlan-transportzone-600", "segmentation_id": 600, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2bb8e7ea-09", "ovs_interfaceid": "2bb8e7ea-091a-4a60-9a2b-e9b196790b55", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 855.054291] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 75fc8365-bf8d-489e-935f-a5169c6a7e62] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:25:6e:9d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bd3c6b64-aba2-4bdc-a693-3b4dff3ed861', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2bb8e7ea-091a-4a60-9a2b-e9b196790b55', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 855.064276] env[63345]: DEBUG oslo.service.loopingcall [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 855.064811] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 75fc8365-bf8d-489e-935f-a5169c6a7e62] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 855.065250] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-57912478-d5d5-4c01-8b23-18537bd114b7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.093146] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5a4b2e2b-02a6-47e3-9c0b-e7b4be448d85 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 855.100516] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 855.100516] env[63345]: value = "task-1017274" [ 855.100516] env[63345]: _type = "Task" [ 855.100516] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.117271] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017274, 'name': CreateVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.200082] env[63345]: DEBUG nova.compute.manager [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 5e20b33c-1481-4bd3-b269-29a70cc3150d] Start spawning the instance on the hypervisor. 
{{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 855.216334] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4b6e5e86-a53e-4396-8dd3-5765b17272d1 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.045s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 855.222021] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a4317511-468d-4a93-8b92-7812b3bbd0dc tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.349s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 855.222021] env[63345]: DEBUG nova.objects.instance [None req-a4317511-468d-4a93-8b92-7812b3bbd0dc tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Lazy-loading 'resources' on Instance uuid cb712d80-be78-4c19-a891-329011521f30 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 855.242845] env[63345]: DEBUG nova.virt.hardware [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 855.243359] env[63345]: DEBUG nova.virt.hardware [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 855.243359] env[63345]: DEBUG nova.virt.hardware [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 855.243535] env[63345]: DEBUG nova.virt.hardware [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 855.243695] env[63345]: DEBUG nova.virt.hardware [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 855.243851] env[63345]: DEBUG nova.virt.hardware [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 855.244259] env[63345]: DEBUG nova.virt.hardware [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 855.244501] env[63345]: DEBUG nova.virt.hardware [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 855.244717] env[63345]: DEBUG nova.virt.hardware [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 855.244906] env[63345]: DEBUG nova.virt.hardware [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 855.245106] env[63345]: DEBUG nova.virt.hardware [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 855.246264] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e25f7e2-e9a9-4eac-bdff-6889186b68da {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.253682] env[63345]: INFO nova.scheduler.client.report [None req-4b6e5e86-a53e-4396-8dd3-5765b17272d1 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Deleted allocations for instance 37f269fe-0266-4c03-9641-e6f43072657a [ 855.264040] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-990fd466-10fe-4861-8d02-bfd7222eb6bd {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.323687] env[63345]: DEBUG oslo_vmware.api [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1017273, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.379244] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8f08dcb4-e4b7-481d-af8f-e1e940d3cdcb tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lock "1e349d03-6cae-4322-9941-d48c52c21c0e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.479s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 855.409067] env[63345]: DEBUG nova.network.neutron [None req-6f7ba1b9-7c29-4db7-91cf-25c84ef40b76 tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] Updating instance_info_cache with network_info: [{"id": "0ae421d2-83f3-4520-8a37-01cb6a91a3f5", "address": "fa:16:3e:3e:4a:12", "network": {"id": "a7e35920-e04b-4676-9e3a-a322585417c0", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1167528582-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}, {"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.148", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87516cd599534b94801951669a97a9e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d33839ae-40ca-471b-92e3-eb282b920682", "external-id": "nsx-vlan-transportzone-416", "segmentation_id": 416, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0ae421d2-83", "ovs_interfaceid": "0ae421d2-83f3-4520-8a37-01cb6a91a3f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 855.456677] env[63345]: DEBUG oslo_concurrency.lockutils [None req-eef5c25f-f6a5-4c1a-9b50-9a2ecaa961a4 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquiring lock "1e349d03-6cae-4322-9941-d48c52c21c0e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 855.457053] env[63345]: DEBUG oslo_concurrency.lockutils [None req-eef5c25f-f6a5-4c1a-9b50-9a2ecaa961a4 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lock "1e349d03-6cae-4322-9941-d48c52c21c0e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 855.457294] env[63345]: DEBUG oslo_concurrency.lockutils [None req-eef5c25f-f6a5-4c1a-9b50-9a2ecaa961a4 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquiring lock "1e349d03-6cae-4322-9941-d48c52c21c0e-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 855.457514] env[63345]: DEBUG oslo_concurrency.lockutils [None req-eef5c25f-f6a5-4c1a-9b50-9a2ecaa961a4 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lock "1e349d03-6cae-4322-9941-d48c52c21c0e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 855.457695] env[63345]: DEBUG oslo_concurrency.lockutils [None req-eef5c25f-f6a5-4c1a-9b50-9a2ecaa961a4 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lock "1e349d03-6cae-4322-9941-d48c52c21c0e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 855.460224] env[63345]: INFO nova.compute.manager [None req-eef5c25f-f6a5-4c1a-9b50-9a2ecaa961a4 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 1e349d03-6cae-4322-9941-d48c52c21c0e] Terminating instance [ 855.612026] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017274, 'name': CreateVM_Task} progress is 99%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.775565] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4b6e5e86-a53e-4396-8dd3-5765b17272d1 tempest-ServerPasswordTestJSON-2056804140 tempest-ServerPasswordTestJSON-2056804140-project-member] Lock "37f269fe-0266-4c03-9641-e6f43072657a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.595s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 855.823514] env[63345]: DEBUG oslo_vmware.api [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1017273, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.899857] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5077c400-6ef8-4b26-912d-964466ba2c45 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquiring lock "11652422-9136-4453-b932-06695f9bc910-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 855.900028] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5077c400-6ef8-4b26-912d-964466ba2c45 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Lock "11652422-9136-4453-b932-06695f9bc910-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 855.900273] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5077c400-6ef8-4b26-912d-964466ba2c45 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Lock "11652422-9136-4453-b932-06695f9bc910-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 855.913235] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6f7ba1b9-7c29-4db7-91cf-25c84ef40b76 tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Releasing lock "refresh_cache-805f9143-a8d8-4995-a20d-3b10ef3ab599" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 855.917020] env[63345]: DEBUG nova.compute.manager [None req-6f7ba1b9-7c29-4db7-91cf-25c84ef40b76 tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] Inject network info {{(pid=63345) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7548}} [ 855.917020] env[63345]: DEBUG nova.compute.manager [None req-6f7ba1b9-7c29-4db7-91cf-25c84ef40b76 tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] network_info to inject: |[{"id": "0ae421d2-83f3-4520-8a37-01cb6a91a3f5", "address": "fa:16:3e:3e:4a:12", "network": {"id": "a7e35920-e04b-4676-9e3a-a322585417c0", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1167528582-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}, {"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.148", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87516cd599534b94801951669a97a9e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"d33839ae-40ca-471b-92e3-eb282b920682", "external-id": "nsx-vlan-transportzone-416", "segmentation_id": 416, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0ae421d2-83", "ovs_interfaceid": "0ae421d2-83f3-4520-8a37-01cb6a91a3f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7549}} [ 855.918495] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-6f7ba1b9-7c29-4db7-91cf-25c84ef40b76 tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] Reconfiguring VM instance to set the machine id {{(pid=63345) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1802}} [ 855.920180] env[63345]: DEBUG oslo_concurrency.lockutils [req-de7758b5-ca91-443e-8da9-98424c78e941 req-c3409cf4-b6df-4d24-bd5a-58bab796a6f8 service nova] Acquired lock "refresh_cache-805f9143-a8d8-4995-a20d-3b10ef3ab599" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 855.920180] env[63345]: DEBUG nova.network.neutron [req-de7758b5-ca91-443e-8da9-98424c78e941 req-c3409cf4-b6df-4d24-bd5a-58bab796a6f8 service nova] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] Refreshing network info cache for port 0ae421d2-83f3-4520-8a37-01cb6a91a3f5 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 855.921396] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cbdef4c1-261e-43d2-b8fe-f833768f32ff {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.939282] env[63345]: DEBUG oslo_vmware.api [None req-6f7ba1b9-7c29-4db7-91cf-25c84ef40b76 tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Waiting for the task: (returnval){ [ 855.939282] env[63345]: value = "task-1017275" [ 855.939282] env[63345]: _type = "Task" [ 855.939282] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.951024] env[63345]: DEBUG oslo_vmware.api [None req-6f7ba1b9-7c29-4db7-91cf-25c84ef40b76 tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Task: {'id': task-1017275, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.964399] env[63345]: DEBUG nova.compute.manager [None req-eef5c25f-f6a5-4c1a-9b50-9a2ecaa961a4 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 1e349d03-6cae-4322-9941-d48c52c21c0e] Start destroying the instance on the hypervisor. 
{{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 855.964528] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-eef5c25f-f6a5-4c1a-9b50-9a2ecaa961a4 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 1e349d03-6cae-4322-9941-d48c52c21c0e] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 855.966400] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4ec448a-9e17-45ec-a0c1-b5b506708511 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.977840] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-eef5c25f-f6a5-4c1a-9b50-9a2ecaa961a4 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 1e349d03-6cae-4322-9941-d48c52c21c0e] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 855.978157] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-308adb47-adc3-49b5-bd03-596ad52a72e9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.986961] env[63345]: DEBUG oslo_vmware.api [None req-eef5c25f-f6a5-4c1a-9b50-9a2ecaa961a4 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for the task: (returnval){ [ 855.986961] env[63345]: value = "task-1017276" [ 855.986961] env[63345]: _type = "Task" [ 855.986961] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.995964] env[63345]: DEBUG oslo_vmware.api [None req-eef5c25f-f6a5-4c1a-9b50-9a2ecaa961a4 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017276, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.112637] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017274, 'name': CreateVM_Task, 'duration_secs': 0.592576} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.115439] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 75fc8365-bf8d-489e-935f-a5169c6a7e62] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 856.119020] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 856.119020] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 856.119020] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 856.119020] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b7a2de80-e6b8-4f5c-a0ce-6e50c8a62c0e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.124038] env[63345]: DEBUG oslo_vmware.api [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Waiting for the task: (returnval){ [ 856.124038] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52a265f6-5045-9e3a-d12c-bd396263a86c" [ 856.124038] env[63345]: _type = "Task" [ 856.124038] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.138567] env[63345]: DEBUG oslo_vmware.api [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52a265f6-5045-9e3a-d12c-bd396263a86c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.168843] env[63345]: DEBUG nova.network.neutron [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 5e20b33c-1481-4bd3-b269-29a70cc3150d] Successfully updated port: 77f9ccf5-22c3-4c4e-8a43-15543133f465 {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 856.183140] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-869723a1-dfee-42a2-8051-3ccc8ac8170e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.191930] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-980eb084-e9ac-44cd-9bf8-97193d7a5374 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.228420] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc90f62f-7b47-4c65-bf14-1da468445829 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.237454] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be7fbc7e-72dc-4dd8-813f-8d6d1ecfdb5b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.254694] env[63345]: DEBUG nova.compute.provider_tree [None req-a4317511-468d-4a93-8b92-7812b3bbd0dc tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 856.326329] env[63345]: DEBUG oslo_vmware.api [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1017273, 'name': CloneVM_Task} progress is 95%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.448278] env[63345]: DEBUG nova.objects.instance [None req-3296ab69-4b72-45da-ad5f-09fae11a5cb8 tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Lazy-loading 'flavor' on Instance uuid 805f9143-a8d8-4995-a20d-3b10ef3ab599 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 856.453570] env[63345]: DEBUG oslo_vmware.api [None req-6f7ba1b9-7c29-4db7-91cf-25c84ef40b76 tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Task: {'id': task-1017275, 'name': ReconfigVM_Task, 'duration_secs': 0.306541} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.454231] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-6f7ba1b9-7c29-4db7-91cf-25c84ef40b76 tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] Reconfigured VM instance to set the machine id {{(pid=63345) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1805}} [ 856.498760] env[63345]: DEBUG oslo_vmware.api [None req-eef5c25f-f6a5-4c1a-9b50-9a2ecaa961a4 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017276, 'name': PowerOffVM_Task, 'duration_secs': 0.283231} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.498760] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-eef5c25f-f6a5-4c1a-9b50-9a2ecaa961a4 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 1e349d03-6cae-4322-9941-d48c52c21c0e] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 856.498975] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-eef5c25f-f6a5-4c1a-9b50-9a2ecaa961a4 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 1e349d03-6cae-4322-9941-d48c52c21c0e] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 856.499190] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-edfb0b3e-2372-4345-ab24-1ef1c2dd5f03 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.603465] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-eef5c25f-f6a5-4c1a-9b50-9a2ecaa961a4 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 1e349d03-6cae-4322-9941-d48c52c21c0e] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 856.603766] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-eef5c25f-f6a5-4c1a-9b50-9a2ecaa961a4 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 1e349d03-6cae-4322-9941-d48c52c21c0e] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 856.603964] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-eef5c25f-f6a5-4c1a-9b50-9a2ecaa961a4 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Deleting the datastore file [datastore2] 1e349d03-6cae-4322-9941-d48c52c21c0e {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 856.604258] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-15472d5e-9895-4fc1-9e34-d3762dea2d91 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.613336] env[63345]: DEBUG oslo_vmware.api [None req-eef5c25f-f6a5-4c1a-9b50-9a2ecaa961a4 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for the task: (returnval){ [ 856.613336] env[63345]: value = "task-1017278" [ 856.613336] env[63345]: _type = "Task" [ 
856.613336] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.621617] env[63345]: DEBUG oslo_vmware.api [None req-eef5c25f-f6a5-4c1a-9b50-9a2ecaa961a4 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017278, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.633934] env[63345]: DEBUG oslo_vmware.api [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52a265f6-5045-9e3a-d12c-bd396263a86c, 'name': SearchDatastore_Task, 'duration_secs': 0.01532} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.634231] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 856.634480] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 75fc8365-bf8d-489e-935f-a5169c6a7e62] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 856.634818] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 856.634946] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 856.635075] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 856.635344] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0cab3b45-fae2-4dde-a2c7-2d43f278135d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.646773] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 856.646996] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 856.647735] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b98c1788-dafb-46ef-a5db-562fb6991893 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.653040] env[63345]: DEBUG oslo_vmware.api [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Waiting for the task: (returnval){ [ 856.653040] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]525aefd4-06be-2602-a8b9-8ecdc4ae45bd" [ 856.653040] env[63345]: _type = "Task" [ 856.653040] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.661772] env[63345]: DEBUG oslo_vmware.api [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]525aefd4-06be-2602-a8b9-8ecdc4ae45bd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.670469] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Acquiring lock "refresh_cache-5e20b33c-1481-4bd3-b269-29a70cc3150d" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 856.670620] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Acquired lock "refresh_cache-5e20b33c-1481-4bd3-b269-29a70cc3150d" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 856.670813] env[63345]: DEBUG nova.network.neutron [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 5e20b33c-1481-4bd3-b269-29a70cc3150d] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 856.750626] env[63345]: DEBUG nova.compute.manager [req-65fdfd39-4b04-4abb-8b52-a2ecb6075c10 req-bf5f9078-fd87-4f74-b766-09ee6eee04b4 service nova] [instance: 5e20b33c-1481-4bd3-b269-29a70cc3150d] Received event network-vif-plugged-77f9ccf5-22c3-4c4e-8a43-15543133f465 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 856.750803] env[63345]: DEBUG oslo_concurrency.lockutils [req-65fdfd39-4b04-4abb-8b52-a2ecb6075c10 req-bf5f9078-fd87-4f74-b766-09ee6eee04b4 service nova] Acquiring lock "5e20b33c-1481-4bd3-b269-29a70cc3150d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 856.750994] env[63345]: DEBUG 
oslo_concurrency.lockutils [req-65fdfd39-4b04-4abb-8b52-a2ecb6075c10 req-bf5f9078-fd87-4f74-b766-09ee6eee04b4 service nova] Lock "5e20b33c-1481-4bd3-b269-29a70cc3150d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 856.751248] env[63345]: DEBUG oslo_concurrency.lockutils [req-65fdfd39-4b04-4abb-8b52-a2ecb6075c10 req-bf5f9078-fd87-4f74-b766-09ee6eee04b4 service nova] Lock "5e20b33c-1481-4bd3-b269-29a70cc3150d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 856.751446] env[63345]: DEBUG nova.compute.manager [req-65fdfd39-4b04-4abb-8b52-a2ecb6075c10 req-bf5f9078-fd87-4f74-b766-09ee6eee04b4 service nova] [instance: 5e20b33c-1481-4bd3-b269-29a70cc3150d] No waiting events found dispatching network-vif-plugged-77f9ccf5-22c3-4c4e-8a43-15543133f465 {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 856.751619] env[63345]: WARNING nova.compute.manager [req-65fdfd39-4b04-4abb-8b52-a2ecb6075c10 req-bf5f9078-fd87-4f74-b766-09ee6eee04b4 service nova] [instance: 5e20b33c-1481-4bd3-b269-29a70cc3150d] Received unexpected event network-vif-plugged-77f9ccf5-22c3-4c4e-8a43-15543133f465 for instance with vm_state building and task_state spawning. [ 856.751877] env[63345]: DEBUG nova.compute.manager [req-65fdfd39-4b04-4abb-8b52-a2ecb6075c10 req-bf5f9078-fd87-4f74-b766-09ee6eee04b4 service nova] [instance: 5e20b33c-1481-4bd3-b269-29a70cc3150d] Received event network-changed-77f9ccf5-22c3-4c4e-8a43-15543133f465 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 856.751961] env[63345]: DEBUG nova.compute.manager [req-65fdfd39-4b04-4abb-8b52-a2ecb6075c10 req-bf5f9078-fd87-4f74-b766-09ee6eee04b4 service nova] [instance: 5e20b33c-1481-4bd3-b269-29a70cc3150d] Refreshing instance network info cache due to event network-changed-77f9ccf5-22c3-4c4e-8a43-15543133f465. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 856.752115] env[63345]: DEBUG oslo_concurrency.lockutils [req-65fdfd39-4b04-4abb-8b52-a2ecb6075c10 req-bf5f9078-fd87-4f74-b766-09ee6eee04b4 service nova] Acquiring lock "refresh_cache-5e20b33c-1481-4bd3-b269-29a70cc3150d" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 856.757609] env[63345]: DEBUG nova.scheduler.client.report [None req-a4317511-468d-4a93-8b92-7812b3bbd0dc tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 856.771781] env[63345]: DEBUG nova.network.neutron [req-de7758b5-ca91-443e-8da9-98424c78e941 req-c3409cf4-b6df-4d24-bd5a-58bab796a6f8 service nova] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] Updated VIF entry in instance network info cache for port 0ae421d2-83f3-4520-8a37-01cb6a91a3f5. {{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 856.772861] env[63345]: DEBUG nova.network.neutron [req-de7758b5-ca91-443e-8da9-98424c78e941 req-c3409cf4-b6df-4d24-bd5a-58bab796a6f8 service nova] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] Updating instance_info_cache with network_info: [{"id": "0ae421d2-83f3-4520-8a37-01cb6a91a3f5", "address": "fa:16:3e:3e:4a:12", "network": {"id": "a7e35920-e04b-4676-9e3a-a322585417c0", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1167528582-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}, {"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.148", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87516cd599534b94801951669a97a9e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d33839ae-40ca-471b-92e3-eb282b920682", "external-id": "nsx-vlan-transportzone-416", "segmentation_id": 416, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0ae421d2-83", "ovs_interfaceid": "0ae421d2-83f3-4520-8a37-01cb6a91a3f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 856.826439] env[63345]: DEBUG oslo_vmware.api [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1017273, 'name': CloneVM_Task, 'duration_secs': 1.731518} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.826439] env[63345]: INFO nova.virt.vmwareapi.vmops [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Created linked-clone VM from snapshot [ 856.826439] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad450454-72f9-47e7-8424-c7fbb13caf5f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.833085] env[63345]: DEBUG nova.virt.vmwareapi.images [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Uploading image 163b8ba5-dbfa-4890-b990-7e227e0ccf91 {{(pid=63345) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 856.859637] env[63345]: DEBUG oslo_vmware.rw_handles [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 856.859637] env[63345]: value = "vm-226073" [ 856.859637] env[63345]: _type = "VirtualMachine" [ 856.859637] env[63345]: }. {{(pid=63345) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 856.859938] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-1775480e-0dd4-48e5-b875-03efd3bab63b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.866993] env[63345]: DEBUG oslo_vmware.rw_handles [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Lease: (returnval){ [ 856.866993] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]525014d8-0440-1c70-65b7-28358fa542c1" [ 856.866993] env[63345]: _type = "HttpNfcLease" [ 856.866993] env[63345]: } obtained for exporting VM: (result){ [ 856.866993] env[63345]: value = "vm-226073" [ 856.866993] env[63345]: _type = "VirtualMachine" [ 856.866993] env[63345]: }. {{(pid=63345) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 856.867426] env[63345]: DEBUG oslo_vmware.api [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Waiting for the lease: (returnval){ [ 856.867426] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]525014d8-0440-1c70-65b7-28358fa542c1" [ 856.867426] env[63345]: _type = "HttpNfcLease" [ 856.867426] env[63345]: } to be ready. {{(pid=63345) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 856.874535] env[63345]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 856.874535] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]525014d8-0440-1c70-65b7-28358fa542c1" [ 856.874535] env[63345]: _type = "HttpNfcLease" [ 856.874535] env[63345]: } is initializing. 
{{(pid=63345) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 856.961248] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5077c400-6ef8-4b26-912d-964466ba2c45 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquiring lock "refresh_cache-11652422-9136-4453-b932-06695f9bc910" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 856.961489] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5077c400-6ef8-4b26-912d-964466ba2c45 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquired lock "refresh_cache-11652422-9136-4453-b932-06695f9bc910" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 856.961677] env[63345]: DEBUG nova.network.neutron [None req-5077c400-6ef8-4b26-912d-964466ba2c45 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 11652422-9136-4453-b932-06695f9bc910] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 856.963203] env[63345]: DEBUG oslo_concurrency.lockutils [None req-3296ab69-4b72-45da-ad5f-09fae11a5cb8 tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Acquiring lock "refresh_cache-805f9143-a8d8-4995-a20d-3b10ef3ab599" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 857.124676] env[63345]: DEBUG oslo_vmware.api [None req-eef5c25f-f6a5-4c1a-9b50-9a2ecaa961a4 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017278, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.165754] env[63345]: DEBUG oslo_vmware.api [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]525aefd4-06be-2602-a8b9-8ecdc4ae45bd, 'name': SearchDatastore_Task, 'duration_secs': 0.012495} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.166576] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dc4abcd9-9d01-4a99-bf19-8d98e53bf2ad {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.171862] env[63345]: DEBUG oslo_vmware.api [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Waiting for the task: (returnval){ [ 857.171862] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52948999-bb1f-8807-64b1-82caf82156b0" [ 857.171862] env[63345]: _type = "Task" [ 857.171862] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.182325] env[63345]: DEBUG oslo_vmware.api [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52948999-bb1f-8807-64b1-82caf82156b0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.205953] env[63345]: DEBUG nova.network.neutron [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 5e20b33c-1481-4bd3-b269-29a70cc3150d] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 857.262632] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a4317511-468d-4a93-8b92-7812b3bbd0dc tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.043s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 857.266146] env[63345]: DEBUG oslo_concurrency.lockutils [None req-921d1268-d248-42bd-8630-9e88a573b677 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.488s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 857.266990] env[63345]: DEBUG nova.objects.instance [None req-921d1268-d248-42bd-8630-9e88a573b677 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Lazy-loading 'resources' on Instance uuid 40d228ea-881e-4442-a16a-6758d061aa39 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 857.275889] env[63345]: DEBUG oslo_concurrency.lockutils [req-de7758b5-ca91-443e-8da9-98424c78e941 req-c3409cf4-b6df-4d24-bd5a-58bab796a6f8 service nova] Releasing lock "refresh_cache-805f9143-a8d8-4995-a20d-3b10ef3ab599" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 857.275889] env[63345]: DEBUG oslo_concurrency.lockutils [None req-3296ab69-4b72-45da-ad5f-09fae11a5cb8 tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Acquired lock "refresh_cache-805f9143-a8d8-4995-a20d-3b10ef3ab599" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 857.299160] env[63345]: INFO nova.scheduler.client.report [None req-a4317511-468d-4a93-8b92-7812b3bbd0dc tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Deleted allocations for instance cb712d80-be78-4c19-a891-329011521f30 [ 857.378276] env[63345]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 857.378276] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]525014d8-0440-1c70-65b7-28358fa542c1" [ 857.378276] env[63345]: _type = "HttpNfcLease" [ 857.378276] env[63345]: } is ready. 
{{(pid=63345) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 857.378276] env[63345]: DEBUG oslo_vmware.rw_handles [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 857.378276] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]525014d8-0440-1c70-65b7-28358fa542c1" [ 857.378276] env[63345]: _type = "HttpNfcLease" [ 857.378276] env[63345]: }. {{(pid=63345) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 857.378276] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95ef1b82-eb1f-4742-a090-b9fa83cfbc30 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.387283] env[63345]: DEBUG oslo_vmware.rw_handles [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523b5f42-a0a3-36aa-332a-57d34a1a5a9c/disk-0.vmdk from lease info. {{(pid=63345) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 857.387283] env[63345]: DEBUG oslo_vmware.rw_handles [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523b5f42-a0a3-36aa-332a-57d34a1a5a9c/disk-0.vmdk for reading. {{(pid=63345) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 857.476842] env[63345]: DEBUG nova.network.neutron [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 5e20b33c-1481-4bd3-b269-29a70cc3150d] Updating instance_info_cache with network_info: [{"id": "77f9ccf5-22c3-4c4e-8a43-15543133f465", "address": "fa:16:3e:5c:55:5e", "network": {"id": "372a3368-2d7a-4380-b811-7ad477d85250", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-454648225-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "41afa63287424a549133615eb390bac7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b8af79a-31d5-4d78-93d7-3919aa1d9186", "external-id": "nsx-vlan-transportzone-324", "segmentation_id": 324, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap77f9ccf5-22", "ovs_interfaceid": "77f9ccf5-22c3-4c4e-8a43-15543133f465", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 857.522350] env[63345]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with 
opID=oslo.vmware-491a7aa6-72ae-4061-ba22-65b6da7a4a8d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.628844] env[63345]: DEBUG oslo_vmware.api [None req-eef5c25f-f6a5-4c1a-9b50-9a2ecaa961a4 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017278, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.533452} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.628844] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-eef5c25f-f6a5-4c1a-9b50-9a2ecaa961a4 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 857.628844] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-eef5c25f-f6a5-4c1a-9b50-9a2ecaa961a4 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 1e349d03-6cae-4322-9941-d48c52c21c0e] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 857.629037] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-eef5c25f-f6a5-4c1a-9b50-9a2ecaa961a4 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 1e349d03-6cae-4322-9941-d48c52c21c0e] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 857.629128] env[63345]: INFO nova.compute.manager [None req-eef5c25f-f6a5-4c1a-9b50-9a2ecaa961a4 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 1e349d03-6cae-4322-9941-d48c52c21c0e] Took 1.66 seconds to destroy the instance on the hypervisor. [ 857.629370] env[63345]: DEBUG oslo.service.loopingcall [None req-eef5c25f-f6a5-4c1a-9b50-9a2ecaa961a4 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 857.629553] env[63345]: DEBUG nova.compute.manager [-] [instance: 1e349d03-6cae-4322-9941-d48c52c21c0e] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 857.629642] env[63345]: DEBUG nova.network.neutron [-] [instance: 1e349d03-6cae-4322-9941-d48c52c21c0e] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 857.690752] env[63345]: DEBUG oslo_vmware.api [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52948999-bb1f-8807-64b1-82caf82156b0, 'name': SearchDatastore_Task, 'duration_secs': 0.03095} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.691050] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 857.691347] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 75fc8365-bf8d-489e-935f-a5169c6a7e62/75fc8365-bf8d-489e-935f-a5169c6a7e62.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 857.692956] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c10d7d2b-d1e8-4ed5-9cb0-8a071371fad1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.698841] env[63345]: DEBUG oslo_vmware.api [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Waiting for the task: (returnval){ [ 857.698841] env[63345]: value = "task-1017280" [ 857.698841] env[63345]: _type = "Task" [ 857.698841] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.707377] env[63345]: DEBUG oslo_vmware.api [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017280, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.802953] env[63345]: DEBUG nova.network.neutron [None req-5077c400-6ef8-4b26-912d-964466ba2c45 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 11652422-9136-4453-b932-06695f9bc910] Updating instance_info_cache with network_info: [{"id": "71b7616d-5472-4d3c-a8ca-6984d7c70c12", "address": "fa:16:3e:ba:da:55", "network": {"id": "80bb8388-e130-46af-a4fc-1daea51d1bf5", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1343573007-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "964cee117b3c4601b3afe82a8bb9c23e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ddfb706a-add1-4e16-9ac4-d20b16a1df6d", "external-id": "nsx-vlan-transportzone-820", "segmentation_id": 820, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71b7616d-54", "ovs_interfaceid": "71b7616d-5472-4d3c-a8ca-6984d7c70c12", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 857.810486] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a4317511-468d-4a93-8b92-7812b3bbd0dc tempest-ServersTestJSON-1425074420 tempest-ServersTestJSON-1425074420-project-member] Lock "cb712d80-be78-4c19-a891-329011521f30" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.974s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 857.828945] env[63345]: DEBUG nova.network.neutron [None req-3296ab69-4b72-45da-ad5f-09fae11a5cb8 tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 857.864592] env[63345]: DEBUG nova.compute.manager [req-8b8dc7f6-1a77-4fb8-991c-ddd6e1c92fb0 req-df3071d8-0f6d-4b7a-a238-bb32ea06a5f8 service nova] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] Received event network-changed-0ae421d2-83f3-4520-8a37-01cb6a91a3f5 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 857.864801] env[63345]: DEBUG nova.compute.manager [req-8b8dc7f6-1a77-4fb8-991c-ddd6e1c92fb0 req-df3071d8-0f6d-4b7a-a238-bb32ea06a5f8 service nova] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] Refreshing instance network info cache due to event network-changed-0ae421d2-83f3-4520-8a37-01cb6a91a3f5. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 857.864984] env[63345]: DEBUG oslo_concurrency.lockutils [req-8b8dc7f6-1a77-4fb8-991c-ddd6e1c92fb0 req-df3071d8-0f6d-4b7a-a238-bb32ea06a5f8 service nova] Acquiring lock "refresh_cache-805f9143-a8d8-4995-a20d-3b10ef3ab599" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 857.980429] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Releasing lock "refresh_cache-5e20b33c-1481-4bd3-b269-29a70cc3150d" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 857.980935] env[63345]: DEBUG nova.compute.manager [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 5e20b33c-1481-4bd3-b269-29a70cc3150d] Instance network_info: |[{"id": "77f9ccf5-22c3-4c4e-8a43-15543133f465", "address": "fa:16:3e:5c:55:5e", "network": {"id": "372a3368-2d7a-4380-b811-7ad477d85250", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-454648225-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "41afa63287424a549133615eb390bac7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b8af79a-31d5-4d78-93d7-3919aa1d9186", "external-id": "nsx-vlan-transportzone-324", "segmentation_id": 324, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap77f9ccf5-22", "ovs_interfaceid": "77f9ccf5-22c3-4c4e-8a43-15543133f465", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 857.981491] env[63345]: DEBUG oslo_concurrency.lockutils [req-65fdfd39-4b04-4abb-8b52-a2ecb6075c10 req-bf5f9078-fd87-4f74-b766-09ee6eee04b4 service nova] Acquired lock "refresh_cache-5e20b33c-1481-4bd3-b269-29a70cc3150d" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 857.982066] env[63345]: DEBUG nova.network.neutron [req-65fdfd39-4b04-4abb-8b52-a2ecb6075c10 req-bf5f9078-fd87-4f74-b766-09ee6eee04b4 service nova] [instance: 5e20b33c-1481-4bd3-b269-29a70cc3150d] Refreshing network info cache for port 77f9ccf5-22c3-4c4e-8a43-15543133f465 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 857.986258] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 5e20b33c-1481-4bd3-b269-29a70cc3150d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5c:55:5e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5b8af79a-31d5-4d78-93d7-3919aa1d9186', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'77f9ccf5-22c3-4c4e-8a43-15543133f465', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 857.994356] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Creating folder: Project (41afa63287424a549133615eb390bac7). Parent ref: group-v225918. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 857.996013] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-eb61fde3-c7bf-4b4e-9cb8-c07e19bf3ebc {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.012639] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Created folder: Project (41afa63287424a549133615eb390bac7) in parent group-v225918. [ 858.012639] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Creating folder: Instances. Parent ref: group-v226075. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 858.016010] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-207b9c8e-5cc0-4eb4-9b79-528d8f609a22 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.025568] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Created folder: Instances in parent group-v226075. [ 858.025845] env[63345]: DEBUG oslo.service.loopingcall [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 858.026158] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5e20b33c-1481-4bd3-b269-29a70cc3150d] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 858.026391] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-aeceb009-43b8-4763-815b-8aeaef5a6c96 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.065236] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 858.065236] env[63345]: value = "task-1017283" [ 858.065236] env[63345]: _type = "Task" [ 858.065236] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.082855] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017283, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.211569] env[63345]: DEBUG oslo_vmware.api [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017280, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.271346] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13589f25-f6a4-45c3-a845-1f075b465747 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.280464] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-417e01d4-dc5d-44b7-9f16-8cd3ce602ad9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.316607] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5077c400-6ef8-4b26-912d-964466ba2c45 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Releasing lock "refresh_cache-11652422-9136-4453-b932-06695f9bc910" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 858.323241] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3864894b-327b-47d5-80c1-51fb150acbba {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.333238] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dfe7c2e-6e31-470f-9e85-c4d1c2261068 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.357278] env[63345]: DEBUG nova.compute.provider_tree [None req-921d1268-d248-42bd-8630-9e88a573b677 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 858.577437] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017283, 'name': CreateVM_Task} progress is 25%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.593323] env[63345]: DEBUG nova.network.neutron [-] [instance: 1e349d03-6cae-4322-9941-d48c52c21c0e] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 858.714432] env[63345]: DEBUG oslo_vmware.api [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017280, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.703817} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.714740] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 75fc8365-bf8d-489e-935f-a5169c6a7e62/75fc8365-bf8d-489e-935f-a5169c6a7e62.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 858.715332] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 75fc8365-bf8d-489e-935f-a5169c6a7e62] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 858.715714] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-37225905-1fb5-44df-af8c-a232442c67b9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.723564] env[63345]: DEBUG oslo_vmware.api [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Waiting for the task: (returnval){ [ 858.723564] env[63345]: value = "task-1017284" [ 858.723564] env[63345]: _type = "Task" [ 858.723564] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.734014] env[63345]: DEBUG oslo_vmware.api [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017284, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.835995] env[63345]: DEBUG nova.network.neutron [req-65fdfd39-4b04-4abb-8b52-a2ecb6075c10 req-bf5f9078-fd87-4f74-b766-09ee6eee04b4 service nova] [instance: 5e20b33c-1481-4bd3-b269-29a70cc3150d] Updated VIF entry in instance network info cache for port 77f9ccf5-22c3-4c4e-8a43-15543133f465. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 858.836651] env[63345]: DEBUG nova.network.neutron [req-65fdfd39-4b04-4abb-8b52-a2ecb6075c10 req-bf5f9078-fd87-4f74-b766-09ee6eee04b4 service nova] [instance: 5e20b33c-1481-4bd3-b269-29a70cc3150d] Updating instance_info_cache with network_info: [{"id": "77f9ccf5-22c3-4c4e-8a43-15543133f465", "address": "fa:16:3e:5c:55:5e", "network": {"id": "372a3368-2d7a-4380-b811-7ad477d85250", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-454648225-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "41afa63287424a549133615eb390bac7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b8af79a-31d5-4d78-93d7-3919aa1d9186", "external-id": "nsx-vlan-transportzone-324", "segmentation_id": 324, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap77f9ccf5-22", "ovs_interfaceid": "77f9ccf5-22c3-4c4e-8a43-15543133f465", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 858.846236] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5962d2db-42b0-4df0-b7f6-3b2a889d416a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.866657] env[63345]: DEBUG nova.scheduler.client.report [None req-921d1268-d248-42bd-8630-9e88a573b677 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 858.872098] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3595613-c1a5-4665-bb06-dcdd2508e216 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.884645] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-5077c400-6ef8-4b26-912d-964466ba2c45 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 11652422-9136-4453-b932-06695f9bc910] Updating instance '11652422-9136-4453-b932-06695f9bc910' progress to 83 {{(pid=63345) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 859.002697] env[63345]: DEBUG nova.network.neutron [None req-3296ab69-4b72-45da-ad5f-09fae11a5cb8 tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] [instance: 
805f9143-a8d8-4995-a20d-3b10ef3ab599] Updating instance_info_cache with network_info: [{"id": "0ae421d2-83f3-4520-8a37-01cb6a91a3f5", "address": "fa:16:3e:3e:4a:12", "network": {"id": "a7e35920-e04b-4676-9e3a-a322585417c0", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1167528582-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.148", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87516cd599534b94801951669a97a9e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d33839ae-40ca-471b-92e3-eb282b920682", "external-id": "nsx-vlan-transportzone-416", "segmentation_id": 416, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0ae421d2-83", "ovs_interfaceid": "0ae421d2-83f3-4520-8a37-01cb6a91a3f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 859.078968] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017283, 'name': CreateVM_Task, 'duration_secs': 0.909785} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.080320] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5e20b33c-1481-4bd3-b269-29a70cc3150d] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 859.081076] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 859.081299] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 859.081708] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 859.082021] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-109176a4-5896-4fd5-8bf1-707000a0b22d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.088283] env[63345]: DEBUG oslo_vmware.api [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c 
tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Waiting for the task: (returnval){ [ 859.088283] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]526216f6-fffa-0362-933c-2fc15b890894" [ 859.088283] env[63345]: _type = "Task" [ 859.088283] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.096955] env[63345]: INFO nova.compute.manager [-] [instance: 1e349d03-6cae-4322-9941-d48c52c21c0e] Took 1.47 seconds to deallocate network for instance. [ 859.097409] env[63345]: DEBUG oslo_vmware.api [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]526216f6-fffa-0362-933c-2fc15b890894, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.101768] env[63345]: DEBUG nova.compute.manager [req-e68c8731-5be8-46e2-89ef-ba719c3308f1 req-72e06009-6d3a-4af2-bc81-c29bf9ae5c28 service nova] [instance: 1e349d03-6cae-4322-9941-d48c52c21c0e] Received event network-vif-deleted-1ebdc50b-86da-4bb4-8884-530d087bf7dd {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 859.237960] env[63345]: DEBUG oslo_vmware.api [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017284, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075218} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.237960] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 75fc8365-bf8d-489e-935f-a5169c6a7e62] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 859.238714] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15c3bc5d-e4e0-4851-9dde-e814166d0c17 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.268223] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 75fc8365-bf8d-489e-935f-a5169c6a7e62] Reconfiguring VM instance instance-0000004c to attach disk [datastore2] 75fc8365-bf8d-489e-935f-a5169c6a7e62/75fc8365-bf8d-489e-935f-a5169c6a7e62.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 859.269637] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-74a1cc43-d4bf-445e-abd5-e38f7df507b6 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.292960] env[63345]: DEBUG oslo_vmware.api [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Waiting for the task: (returnval){ [ 859.292960] env[63345]: value = "task-1017285" [ 859.292960] env[63345]: _type = "Task" [ 859.292960] env[63345]: } 
to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.301666] env[63345]: DEBUG oslo_vmware.api [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017285, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.345058] env[63345]: DEBUG oslo_concurrency.lockutils [req-65fdfd39-4b04-4abb-8b52-a2ecb6075c10 req-bf5f9078-fd87-4f74-b766-09ee6eee04b4 service nova] Releasing lock "refresh_cache-5e20b33c-1481-4bd3-b269-29a70cc3150d" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 859.377534] env[63345]: DEBUG oslo_concurrency.lockutils [None req-921d1268-d248-42bd-8630-9e88a573b677 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.112s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 859.381592] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.542s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 859.383341] env[63345]: INFO nova.compute.claims [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 859.390442] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-5077c400-6ef8-4b26-912d-964466ba2c45 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 11652422-9136-4453-b932-06695f9bc910] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 859.390817] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-42063c75-19c1-4d10-a570-c4d816e12d25 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.398365] env[63345]: DEBUG oslo_vmware.api [None req-5077c400-6ef8-4b26-912d-964466ba2c45 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Waiting for the task: (returnval){ [ 859.398365] env[63345]: value = "task-1017286" [ 859.398365] env[63345]: _type = "Task" [ 859.398365] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.414535] env[63345]: DEBUG oslo_vmware.api [None req-5077c400-6ef8-4b26-912d-964466ba2c45 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017286, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.417139] env[63345]: INFO nova.scheduler.client.report [None req-921d1268-d248-42bd-8630-9e88a573b677 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Deleted allocations for instance 40d228ea-881e-4442-a16a-6758d061aa39 [ 859.508112] env[63345]: DEBUG oslo_concurrency.lockutils [None req-3296ab69-4b72-45da-ad5f-09fae11a5cb8 tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Releasing lock "refresh_cache-805f9143-a8d8-4995-a20d-3b10ef3ab599" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 859.508515] env[63345]: DEBUG nova.compute.manager [None req-3296ab69-4b72-45da-ad5f-09fae11a5cb8 tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] Inject network info {{(pid=63345) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7548}} [ 859.508986] env[63345]: DEBUG nova.compute.manager [None req-3296ab69-4b72-45da-ad5f-09fae11a5cb8 tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] network_info to inject: |[{"id": "0ae421d2-83f3-4520-8a37-01cb6a91a3f5", "address": "fa:16:3e:3e:4a:12", "network": {"id": "a7e35920-e04b-4676-9e3a-a322585417c0", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1167528582-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.148", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87516cd599534b94801951669a97a9e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d33839ae-40ca-471b-92e3-eb282b920682", "external-id": "nsx-vlan-transportzone-416", "segmentation_id": 416, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0ae421d2-83", "ovs_interfaceid": "0ae421d2-83f3-4520-8a37-01cb6a91a3f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7549}} [ 859.514453] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-3296ab69-4b72-45da-ad5f-09fae11a5cb8 tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] Reconfiguring VM instance to set the machine id {{(pid=63345) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1802}} [ 859.515404] env[63345]: DEBUG oslo_concurrency.lockutils [req-8b8dc7f6-1a77-4fb8-991c-ddd6e1c92fb0 req-df3071d8-0f6d-4b7a-a238-bb32ea06a5f8 service nova] Acquired lock "refresh_cache-805f9143-a8d8-4995-a20d-3b10ef3ab599" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 859.515757] env[63345]: DEBUG nova.network.neutron 
[req-8b8dc7f6-1a77-4fb8-991c-ddd6e1c92fb0 req-df3071d8-0f6d-4b7a-a238-bb32ea06a5f8 service nova] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] Refreshing network info cache for port 0ae421d2-83f3-4520-8a37-01cb6a91a3f5 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 859.517209] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2ce0f4fc-9ded-413e-8916-8690afc807ce {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.534811] env[63345]: DEBUG oslo_vmware.api [None req-3296ab69-4b72-45da-ad5f-09fae11a5cb8 tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Waiting for the task: (returnval){ [ 859.534811] env[63345]: value = "task-1017287" [ 859.534811] env[63345]: _type = "Task" [ 859.534811] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.545688] env[63345]: DEBUG oslo_vmware.api [None req-3296ab69-4b72-45da-ad5f-09fae11a5cb8 tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Task: {'id': task-1017287, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.599207] env[63345]: DEBUG oslo_vmware.api [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]526216f6-fffa-0362-933c-2fc15b890894, 'name': SearchDatastore_Task, 'duration_secs': 0.013909} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.599460] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 859.599705] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 5e20b33c-1481-4bd3-b269-29a70cc3150d] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 859.599955] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 859.600122] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 859.600374] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 859.600582] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0aec49ff-b0ce-4b90-b265-6597e6930e5c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.607129] env[63345]: DEBUG oslo_concurrency.lockutils [None req-eef5c25f-f6a5-4c1a-9b50-9a2ecaa961a4 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 859.609780] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 859.611437] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 859.611437] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b4ba5128-3e06-48f5-9ec6-398ed5193f2b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.619139] env[63345]: DEBUG oslo_vmware.api [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Waiting for the task: (returnval){ [ 859.619139] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52240449-b9b6-aaab-37ca-1a1f0ea091a3" [ 859.619139] env[63345]: _type = "Task" [ 859.619139] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.632585] env[63345]: DEBUG oslo_vmware.api [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52240449-b9b6-aaab-37ca-1a1f0ea091a3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.682722] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a3c06927-7632-4fbb-a205-b02d5b82066d tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Acquiring lock "805f9143-a8d8-4995-a20d-3b10ef3ab599" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 859.682722] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a3c06927-7632-4fbb-a205-b02d5b82066d tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Lock "805f9143-a8d8-4995-a20d-3b10ef3ab599" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 859.682722] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a3c06927-7632-4fbb-a205-b02d5b82066d tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Acquiring lock "805f9143-a8d8-4995-a20d-3b10ef3ab599-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 859.682722] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a3c06927-7632-4fbb-a205-b02d5b82066d tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Lock "805f9143-a8d8-4995-a20d-3b10ef3ab599-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 859.682722] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a3c06927-7632-4fbb-a205-b02d5b82066d tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Lock "805f9143-a8d8-4995-a20d-3b10ef3ab599-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 859.686929] env[63345]: INFO nova.compute.manager [None req-a3c06927-7632-4fbb-a205-b02d5b82066d tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] Terminating instance [ 859.803833] env[63345]: DEBUG oslo_vmware.api [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017285, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.914089] env[63345]: DEBUG oslo_vmware.api [None req-5077c400-6ef8-4b26-912d-964466ba2c45 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017286, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.924749] env[63345]: DEBUG oslo_concurrency.lockutils [None req-921d1268-d248-42bd-8630-9e88a573b677 tempest-ServerDiagnosticsTest-1401656714 tempest-ServerDiagnosticsTest-1401656714-project-member] Lock "40d228ea-881e-4442-a16a-6758d061aa39" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.743s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 860.048488] env[63345]: DEBUG oslo_vmware.api [None req-3296ab69-4b72-45da-ad5f-09fae11a5cb8 tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Task: {'id': task-1017287, 'name': ReconfigVM_Task, 'duration_secs': 0.167326} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.048861] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-3296ab69-4b72-45da-ad5f-09fae11a5cb8 tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] Reconfigured VM instance to set the machine id {{(pid=63345) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1805}} [ 860.131888] env[63345]: DEBUG oslo_vmware.api [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52240449-b9b6-aaab-37ca-1a1f0ea091a3, 'name': SearchDatastore_Task, 'duration_secs': 0.015447} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.132817] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-328b1e61-4eaa-4c45-b909-213b48347faa {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.139497] env[63345]: DEBUG oslo_vmware.api [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Waiting for the task: (returnval){ [ 860.139497] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]5252296b-2765-33f0-c25e-df3eef193053" [ 860.139497] env[63345]: _type = "Task" [ 860.139497] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.147650] env[63345]: DEBUG oslo_vmware.api [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5252296b-2765-33f0-c25e-df3eef193053, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.194356] env[63345]: DEBUG nova.compute.manager [None req-a3c06927-7632-4fbb-a205-b02d5b82066d tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] Start destroying the instance on the hypervisor. {{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 860.194631] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a3c06927-7632-4fbb-a205-b02d5b82066d tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 860.198267] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c11e8244-e9f4-4f32-8cf6-18eb7f9ace61 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.207751] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3c06927-7632-4fbb-a205-b02d5b82066d tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 860.208111] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8320ef41-44cc-4431-85dd-b2c36b5bd9dd {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.215055] env[63345]: DEBUG oslo_vmware.api [None req-a3c06927-7632-4fbb-a205-b02d5b82066d tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Waiting for the task: (returnval){ [ 860.215055] env[63345]: value = "task-1017288" [ 860.215055] env[63345]: _type = "Task" [ 860.215055] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.228362] env[63345]: DEBUG oslo_vmware.api [None req-a3c06927-7632-4fbb-a205-b02d5b82066d tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Task: {'id': task-1017288, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.306272] env[63345]: DEBUG oslo_vmware.api [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017285, 'name': ReconfigVM_Task, 'duration_secs': 0.646875} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.306272] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 75fc8365-bf8d-489e-935f-a5169c6a7e62] Reconfigured VM instance instance-0000004c to attach disk [datastore2] 75fc8365-bf8d-489e-935f-a5169c6a7e62/75fc8365-bf8d-489e-935f-a5169c6a7e62.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 860.307254] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-47cf8e9f-c59f-40fb-9cab-7922f946b6a9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.315736] env[63345]: DEBUG oslo_vmware.api [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Waiting for the task: (returnval){ [ 860.315736] env[63345]: value = "task-1017289" [ 860.315736] env[63345]: _type = "Task" [ 860.315736] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.330065] env[63345]: DEBUG oslo_vmware.api [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017289, 'name': Rename_Task} progress is 10%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.413919] env[63345]: DEBUG oslo_vmware.api [None req-5077c400-6ef8-4b26-912d-964466ba2c45 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017286, 'name': PowerOnVM_Task, 'duration_secs': 0.697029} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.413919] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-5077c400-6ef8-4b26-912d-964466ba2c45 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 11652422-9136-4453-b932-06695f9bc910] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 860.414095] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-5077c400-6ef8-4b26-912d-964466ba2c45 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 11652422-9136-4453-b932-06695f9bc910] Updating instance '11652422-9136-4453-b932-06695f9bc910' progress to 100 {{(pid=63345) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 860.470788] env[63345]: DEBUG nova.network.neutron [req-8b8dc7f6-1a77-4fb8-991c-ddd6e1c92fb0 req-df3071d8-0f6d-4b7a-a238-bb32ea06a5f8 service nova] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] Updated VIF entry in instance network info cache for port 0ae421d2-83f3-4520-8a37-01cb6a91a3f5. {{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 860.473988] env[63345]: DEBUG nova.network.neutron [req-8b8dc7f6-1a77-4fb8-991c-ddd6e1c92fb0 req-df3071d8-0f6d-4b7a-a238-bb32ea06a5f8 service nova] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] Updating instance_info_cache with network_info: [{"id": "0ae421d2-83f3-4520-8a37-01cb6a91a3f5", "address": "fa:16:3e:3e:4a:12", "network": {"id": "a7e35920-e04b-4676-9e3a-a322585417c0", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1167528582-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.148", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87516cd599534b94801951669a97a9e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d33839ae-40ca-471b-92e3-eb282b920682", "external-id": "nsx-vlan-transportzone-416", "segmentation_id": 416, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0ae421d2-83", "ovs_interfaceid": "0ae421d2-83f3-4520-8a37-01cb6a91a3f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 860.654378] env[63345]: DEBUG oslo_vmware.api [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5252296b-2765-33f0-c25e-df3eef193053, 'name': SearchDatastore_Task, 'duration_secs': 0.009791} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.654699] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 860.655034] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 5e20b33c-1481-4bd3-b269-29a70cc3150d/5e20b33c-1481-4bd3-b269-29a70cc3150d.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 860.655322] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8764b20d-889d-42f7-b515-57901cbc1c2a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.669024] env[63345]: DEBUG oslo_vmware.api [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Waiting for the task: (returnval){ [ 860.669024] env[63345]: value = "task-1017290" [ 860.669024] env[63345]: _type = "Task" [ 860.669024] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.677289] env[63345]: DEBUG oslo_vmware.api [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017290, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.726354] env[63345]: DEBUG oslo_vmware.api [None req-a3c06927-7632-4fbb-a205-b02d5b82066d tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Task: {'id': task-1017288, 'name': PowerOffVM_Task, 'duration_secs': 0.221704} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.729602] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3c06927-7632-4fbb-a205-b02d5b82066d tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 860.729792] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a3c06927-7632-4fbb-a205-b02d5b82066d tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 860.731130] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9e62ffae-4b81-4434-934b-f8979523273f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.801267] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a3c06927-7632-4fbb-a205-b02d5b82066d tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 860.802239] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a3c06927-7632-4fbb-a205-b02d5b82066d tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 860.802471] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3c06927-7632-4fbb-a205-b02d5b82066d tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Deleting the datastore file [datastore2] 805f9143-a8d8-4995-a20d-3b10ef3ab599 {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 860.803460] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2b986935-50a8-47c1-a5e9-03d63a3ac736 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.812429] env[63345]: DEBUG oslo_vmware.api [None req-a3c06927-7632-4fbb-a205-b02d5b82066d tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Waiting for the task: (returnval){ [ 860.812429] env[63345]: value = "task-1017292" [ 860.812429] env[63345]: _type = "Task" [ 860.812429] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.824799] env[63345]: DEBUG oslo_vmware.api [None req-a3c06927-7632-4fbb-a205-b02d5b82066d tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Task: {'id': task-1017292, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.832155] env[63345]: DEBUG oslo_vmware.api [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017289, 'name': Rename_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.874062] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-515112a9-c5ca-4d92-b802-857a91670e67 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.882552] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9b17b7a-d3d7-42b2-b624-0fb07dfb593d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.926577] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee21e3b5-890d-4cd4-8bc9-74882d89ee46 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.935973] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d4a622a-6ddc-433a-9011-910258a0d265 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.954081] env[63345]: DEBUG nova.compute.provider_tree [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 860.979496] env[63345]: DEBUG oslo_concurrency.lockutils [req-8b8dc7f6-1a77-4fb8-991c-ddd6e1c92fb0 req-df3071d8-0f6d-4b7a-a238-bb32ea06a5f8 service nova] Releasing lock "refresh_cache-805f9143-a8d8-4995-a20d-3b10ef3ab599" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 861.177959] env[63345]: DEBUG oslo_vmware.api [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017290, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.326926] env[63345]: DEBUG oslo_vmware.api [None req-a3c06927-7632-4fbb-a205-b02d5b82066d tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Task: {'id': task-1017292, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.505531} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.333392] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3c06927-7632-4fbb-a205-b02d5b82066d tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 861.333500] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a3c06927-7632-4fbb-a205-b02d5b82066d tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 861.333746] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a3c06927-7632-4fbb-a205-b02d5b82066d tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 861.334433] env[63345]: INFO nova.compute.manager [None req-a3c06927-7632-4fbb-a205-b02d5b82066d tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] Took 1.14 seconds to destroy the instance on the hypervisor. [ 861.334433] env[63345]: DEBUG oslo.service.loopingcall [None req-a3c06927-7632-4fbb-a205-b02d5b82066d tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 861.334623] env[63345]: DEBUG oslo_vmware.api [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017289, 'name': Rename_Task} progress is 99%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.334779] env[63345]: DEBUG nova.compute.manager [-] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 861.334833] env[63345]: DEBUG nova.network.neutron [-] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 861.458049] env[63345]: DEBUG nova.scheduler.client.report [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 861.679504] env[63345]: DEBUG oslo_vmware.api [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017290, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.568531} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.679786] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 5e20b33c-1481-4bd3-b269-29a70cc3150d/5e20b33c-1481-4bd3-b269-29a70cc3150d.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 861.680088] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 5e20b33c-1481-4bd3-b269-29a70cc3150d] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 861.680383] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-32262321-d4af-4514-9c15-8be99845bb00 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.687276] env[63345]: DEBUG oslo_vmware.api [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Waiting for the task: (returnval){ [ 861.687276] env[63345]: value = "task-1017293" [ 861.687276] env[63345]: _type = "Task" [ 861.687276] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.695699] env[63345]: DEBUG oslo_vmware.api [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017293, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.834396] env[63345]: DEBUG oslo_vmware.api [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017289, 'name': Rename_Task} progress is 99%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.967595] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.586s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 861.968877] env[63345]: DEBUG nova.compute.manager [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Start building networks asynchronously for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 861.972632] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d6b74ae1-2c3a-4c57-a9e1-bd0268848c63 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.638s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 861.972988] env[63345]: DEBUG nova.objects.instance [None req-d6b74ae1-2c3a-4c57-a9e1-bd0268848c63 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Lazy-loading 'resources' on Instance uuid df2f06af-54a6-4dbd-83ff-1e4b066acbf3 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 862.171752] env[63345]: DEBUG nova.compute.manager [req-794446c7-5273-4185-9542-ceeee0be6c7a req-91bddbd5-a52d-4f1e-b358-2f26dbc43f6c service nova] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] Received event network-vif-deleted-0ae421d2-83f3-4520-8a37-01cb6a91a3f5 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 862.171887] env[63345]: INFO nova.compute.manager [req-794446c7-5273-4185-9542-ceeee0be6c7a req-91bddbd5-a52d-4f1e-b358-2f26dbc43f6c service nova] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] Neutron deleted interface 0ae421d2-83f3-4520-8a37-01cb6a91a3f5; detaching it from the instance and deleting it from the info cache [ 862.172232] env[63345]: DEBUG nova.network.neutron [req-794446c7-5273-4185-9542-ceeee0be6c7a req-91bddbd5-a52d-4f1e-b358-2f26dbc43f6c service nova] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 862.200721] env[63345]: DEBUG oslo_vmware.api [None 
req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017293, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.3375} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.201240] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 5e20b33c-1481-4bd3-b269-29a70cc3150d] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 862.202800] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c3b1fc4-b0a6-4215-a130-f1150c9b98c1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.234073] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 5e20b33c-1481-4bd3-b269-29a70cc3150d] Reconfiguring VM instance instance-0000004d to attach disk [datastore2] 5e20b33c-1481-4bd3-b269-29a70cc3150d/5e20b33c-1481-4bd3-b269-29a70cc3150d.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 862.234610] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f2c085d6-d0f7-497e-9579-e43b2d133db9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.261822] env[63345]: DEBUG oslo_vmware.api [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Waiting for the task: (returnval){ [ 862.261822] env[63345]: value = "task-1017294" [ 862.261822] env[63345]: _type = "Task" [ 862.261822] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.271026] env[63345]: DEBUG oslo_vmware.api [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017294, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.330182] env[63345]: DEBUG oslo_vmware.api [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017289, 'name': Rename_Task, 'duration_secs': 1.764973} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.330676] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 75fc8365-bf8d-489e-935f-a5169c6a7e62] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 862.331105] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0ac564b3-ac01-4438-87bc-1fb510fe7d96 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.339808] env[63345]: DEBUG oslo_vmware.api [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Waiting for the task: (returnval){ [ 862.339808] env[63345]: value = "task-1017295" [ 862.339808] env[63345]: _type = "Task" [ 862.339808] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.350692] env[63345]: DEBUG oslo_vmware.api [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017295, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.478929] env[63345]: DEBUG nova.compute.utils [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 862.480882] env[63345]: DEBUG nova.compute.manager [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Allocating IP information in the background. 
{{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 862.481071] env[63345]: DEBUG nova.network.neutron [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 862.512024] env[63345]: DEBUG nova.network.neutron [-] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 862.535195] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b9882f35-89e6-4dc7-8f48-84eee80f3558 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquiring lock "11652422-9136-4453-b932-06695f9bc910" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 862.535741] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b9882f35-89e6-4dc7-8f48-84eee80f3558 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Lock "11652422-9136-4453-b932-06695f9bc910" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 862.535920] env[63345]: DEBUG nova.compute.manager [None req-b9882f35-89e6-4dc7-8f48-84eee80f3558 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 11652422-9136-4453-b932-06695f9bc910] Going to confirm migration 2 {{(pid=63345) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5112}} [ 862.623033] env[63345]: DEBUG nova.policy [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e36fd04030444217acadbbf4e4fe9be0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '33c28bfca4da460e8ca96dc7519204c8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 862.675226] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-93e25b27-1296-492d-b5bd-71fde0faf193 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.691569] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-072646bd-7b0f-456d-9184-d995cbaa6d6e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.734589] env[63345]: DEBUG nova.compute.manager [req-794446c7-5273-4185-9542-ceeee0be6c7a req-91bddbd5-a52d-4f1e-b358-2f26dbc43f6c service nova] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] Detach interface failed, port_id=0ae421d2-83f3-4520-8a37-01cb6a91a3f5, reason: Instance 
805f9143-a8d8-4995-a20d-3b10ef3ab599 could not be found. {{(pid=63345) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 862.775439] env[63345]: DEBUG oslo_vmware.api [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017294, 'name': ReconfigVM_Task, 'duration_secs': 0.357216} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.775709] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 5e20b33c-1481-4bd3-b269-29a70cc3150d] Reconfigured VM instance instance-0000004d to attach disk [datastore2] 5e20b33c-1481-4bd3-b269-29a70cc3150d/5e20b33c-1481-4bd3-b269-29a70cc3150d.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 862.776366] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-87f6923b-2b92-493c-994c-70583d7d44f7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.785249] env[63345]: DEBUG oslo_vmware.api [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Waiting for the task: (returnval){ [ 862.785249] env[63345]: value = "task-1017296" [ 862.785249] env[63345]: _type = "Task" [ 862.785249] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.795486] env[63345]: DEBUG oslo_vmware.api [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017296, 'name': Rename_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.850965] env[63345]: DEBUG oslo_vmware.api [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017295, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.913275] env[63345]: DEBUG oslo_concurrency.lockutils [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Acquiring lock "9aa651b8-317d-4153-8c33-9df0a5d16115" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 862.913588] env[63345]: DEBUG oslo_concurrency.lockutils [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Lock "9aa651b8-317d-4153-8c33-9df0a5d16115" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 862.913821] env[63345]: INFO nova.compute.manager [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Shelving [ 862.927381] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bc48940-c632-4fb0-a392-eba347921ae5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.936781] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e31a7842-15d4-4d1d-a120-b7c956d4346c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.971996] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34692135-436e-46f7-b7be-8822ddceaadc {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.980585] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81e34c58-b056-4783-89b7-9b10e4b06743 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.985381] env[63345]: DEBUG nova.compute.manager [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Start building block device mappings for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 863.000876] env[63345]: DEBUG nova.compute.provider_tree [None req-d6b74ae1-2c3a-4c57-a9e1-bd0268848c63 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 863.014337] env[63345]: INFO nova.compute.manager [-] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] Took 1.68 seconds to deallocate network for instance. 
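The report-client entries above (the full payload at 861.458049, and the "Inventory has not changed in ProviderTree" checks at 860.954081 and 863.000876) carry the complete inventory for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57: per resource class a total, a reserved amount, an allocation_ratio, plus min/max unit and step size. As an illustration only (this is not code from Nova or from this deployment), the short Python sketch below shows how the schedulable capacity behind those checks is conventionally derived, assuming the standard Placement formula capacity = (total - reserved) * allocation_ratio; the numbers are copied straight from the logged inventory.

    # Illustrative sketch, not deployment code: effective capacity for the
    # inventory logged at 861.458049 for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57.
    # Assumes the usual Placement formula: (total - reserved) * allocation_ratio.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    def effective_capacity(inv):
        # Map each resource class to the units the scheduler can place against.
        return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
                for rc, v in inv.items()}

    print(effective_capacity(inventory))
    # {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}

With allocation_ratio 4.0 on VCPU, the 48 host vCPUs are oversubscribed to 192 schedulable units, while memory and disk are placed 1:1 after the 512 MB reservation; this inventory is what the resource-tracker claims in this section (the successful claim at 859.383341 and the instance_claim lock released at 861.967595) are evaluated against.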
[ 863.124612] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b9882f35-89e6-4dc7-8f48-84eee80f3558 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquiring lock "refresh_cache-11652422-9136-4453-b932-06695f9bc910" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 863.124832] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b9882f35-89e6-4dc7-8f48-84eee80f3558 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquired lock "refresh_cache-11652422-9136-4453-b932-06695f9bc910" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 863.125035] env[63345]: DEBUG nova.network.neutron [None req-b9882f35-89e6-4dc7-8f48-84eee80f3558 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 11652422-9136-4453-b932-06695f9bc910] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 863.125233] env[63345]: DEBUG nova.objects.instance [None req-b9882f35-89e6-4dc7-8f48-84eee80f3558 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Lazy-loading 'info_cache' on Instance uuid 11652422-9136-4453-b932-06695f9bc910 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 863.297729] env[63345]: DEBUG oslo_vmware.api [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017296, 'name': Rename_Task, 'duration_secs': 0.163995} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.298155] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 5e20b33c-1481-4bd3-b269-29a70cc3150d] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 863.298482] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fccbbe1a-074e-4b44-b2ac-dbff94ac3c15 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.307037] env[63345]: DEBUG oslo_vmware.api [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Waiting for the task: (returnval){ [ 863.307037] env[63345]: value = "task-1017297" [ 863.307037] env[63345]: _type = "Task" [ 863.307037] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.320456] env[63345]: DEBUG oslo_vmware.api [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017297, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.353101] env[63345]: DEBUG oslo_vmware.api [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017295, 'name': PowerOnVM_Task, 'duration_secs': 0.65182} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.353511] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 75fc8365-bf8d-489e-935f-a5169c6a7e62] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 863.353856] env[63345]: INFO nova.compute.manager [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 75fc8365-bf8d-489e-935f-a5169c6a7e62] Took 12.81 seconds to spawn the instance on the hypervisor. [ 863.354261] env[63345]: DEBUG nova.compute.manager [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 75fc8365-bf8d-489e-935f-a5169c6a7e62] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 863.355555] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-829d8d54-67a7-414a-a76b-fff9774d5463 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.504404] env[63345]: DEBUG nova.scheduler.client.report [None req-d6b74ae1-2c3a-4c57-a9e1-bd0268848c63 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 863.515834] env[63345]: DEBUG nova.network.neutron [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Successfully created port: 9e054cb2-eb47-4dd3-8ec7-d8205d577337 {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 863.521023] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a3c06927-7632-4fbb-a205-b02d5b82066d tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 863.819663] env[63345]: DEBUG oslo_vmware.api [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: 
{'id': task-1017297, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.875173] env[63345]: INFO nova.compute.manager [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 75fc8365-bf8d-489e-935f-a5169c6a7e62] Took 38.34 seconds to build instance. [ 863.923326] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 863.923776] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cafe9d52-b25a-4bdd-9a3a-1545c537304c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.933074] env[63345]: DEBUG oslo_vmware.api [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Waiting for the task: (returnval){ [ 863.933074] env[63345]: value = "task-1017298" [ 863.933074] env[63345]: _type = "Task" [ 863.933074] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.947423] env[63345]: DEBUG oslo_vmware.api [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017298, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.999332] env[63345]: DEBUG nova.compute.manager [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Start spawning the instance on the hypervisor. 
{{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 864.011824] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d6b74ae1-2c3a-4c57-a9e1-bd0268848c63 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.038s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 864.014836] env[63345]: DEBUG oslo_concurrency.lockutils [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.624s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 864.017938] env[63345]: INFO nova.compute.claims [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: 14198777-9091-4c69-8928-c83135acc7d2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 864.042529] env[63345]: DEBUG nova.virt.hardware [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 864.042529] env[63345]: DEBUG nova.virt.hardware [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 864.042529] env[63345]: DEBUG nova.virt.hardware [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 864.043389] env[63345]: DEBUG nova.virt.hardware [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 864.043614] env[63345]: DEBUG nova.virt.hardware [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Image pref 0:0:0 {{(pid=63345) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 864.043822] env[63345]: DEBUG nova.virt.hardware [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 864.044085] env[63345]: DEBUG nova.virt.hardware [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 864.044313] env[63345]: DEBUG nova.virt.hardware [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 864.044537] env[63345]: DEBUG nova.virt.hardware [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 864.044867] env[63345]: DEBUG nova.virt.hardware [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 864.044947] env[63345]: DEBUG nova.virt.hardware [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 864.045876] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41dec372-0087-4783-9c90-9ae1f6e0f1c9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.051395] env[63345]: INFO nova.scheduler.client.report [None req-d6b74ae1-2c3a-4c57-a9e1-bd0268848c63 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Deleted allocations for instance df2f06af-54a6-4dbd-83ff-1e4b066acbf3 [ 864.066812] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-669b62b8-eb66-4378-a837-9854d087a50a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.327389] env[63345]: DEBUG oslo_vmware.api [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017297, 'name': PowerOnVM_Task, 'duration_secs': 0.512859} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.327389] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 5e20b33c-1481-4bd3-b269-29a70cc3150d] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 864.327389] env[63345]: INFO nova.compute.manager [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 5e20b33c-1481-4bd3-b269-29a70cc3150d] Took 9.13 seconds to spawn the instance on the hypervisor. [ 864.327389] env[63345]: DEBUG nova.compute.manager [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 5e20b33c-1481-4bd3-b269-29a70cc3150d] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 864.329058] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f135bf7-9f3f-441c-bdb2-8ce77c2864b9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.375118] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f8e52948-9b83-44ce-bdeb-44628efda67f tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Lock "75fc8365-bf8d-489e-935f-a5169c6a7e62" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.852s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 864.445510] env[63345]: DEBUG oslo_vmware.api [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017298, 'name': PowerOffVM_Task, 'duration_secs': 0.430663} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.445754] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 864.446631] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd9a354f-6c4f-4114-b0e1-934d206e7001 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.470037] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a717718b-45d7-4fe2-936d-9dd4bd34f00a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.475296] env[63345]: DEBUG nova.network.neutron [None req-b9882f35-89e6-4dc7-8f48-84eee80f3558 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 11652422-9136-4453-b932-06695f9bc910] Updating instance_info_cache with network_info: [{"id": "71b7616d-5472-4d3c-a8ca-6984d7c70c12", "address": "fa:16:3e:ba:da:55", "network": {"id": "80bb8388-e130-46af-a4fc-1daea51d1bf5", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1343573007-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "964cee117b3c4601b3afe82a8bb9c23e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ddfb706a-add1-4e16-9ac4-d20b16a1df6d", "external-id": "nsx-vlan-transportzone-820", "segmentation_id": 820, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71b7616d-54", "ovs_interfaceid": "71b7616d-5472-4d3c-a8ca-6984d7c70c12", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 864.570650] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d6b74ae1-2c3a-4c57-a9e1-bd0268848c63 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Lock "df2f06af-54a6-4dbd-83ff-1e4b066acbf3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.804s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 864.760388] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d8193250-7d50-454f-ac0d-40b80747489c tempest-ServersAdminTestJSON-558260828 tempest-ServersAdminTestJSON-558260828-project-admin] Acquiring lock "refresh_cache-75fc8365-bf8d-489e-935f-a5169c6a7e62" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 864.760572] env[63345]: DEBUG oslo_concurrency.lockutils [None 
req-d8193250-7d50-454f-ac0d-40b80747489c tempest-ServersAdminTestJSON-558260828 tempest-ServersAdminTestJSON-558260828-project-admin] Acquired lock "refresh_cache-75fc8365-bf8d-489e-935f-a5169c6a7e62" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 864.760746] env[63345]: DEBUG nova.network.neutron [None req-d8193250-7d50-454f-ac0d-40b80747489c tempest-ServersAdminTestJSON-558260828 tempest-ServersAdminTestJSON-558260828-project-admin] [instance: 75fc8365-bf8d-489e-935f-a5169c6a7e62] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 864.866183] env[63345]: INFO nova.compute.manager [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 5e20b33c-1481-4bd3-b269-29a70cc3150d] Took 37.34 seconds to build instance. [ 864.983398] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b9882f35-89e6-4dc7-8f48-84eee80f3558 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Releasing lock "refresh_cache-11652422-9136-4453-b932-06695f9bc910" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 864.983682] env[63345]: DEBUG nova.objects.instance [None req-b9882f35-89e6-4dc7-8f48-84eee80f3558 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Lazy-loading 'migration_context' on Instance uuid 11652422-9136-4453-b932-06695f9bc910 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 864.986074] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Creating Snapshot of the VM instance {{(pid=63345) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 864.987079] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-998e5cc6-2615-4ebc-a422-a75461f5d073 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.999682] env[63345]: DEBUG oslo_vmware.api [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Waiting for the task: (returnval){ [ 864.999682] env[63345]: value = "task-1017299" [ 864.999682] env[63345]: _type = "Task" [ 864.999682] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.011112] env[63345]: DEBUG oslo_vmware.api [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017299, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.369395] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ea9cf6c0-9290-415f-93c1-4cd8cb679f4c tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Lock "5e20b33c-1481-4bd3-b269-29a70cc3150d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.855s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 865.462549] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00170810-5ef3-4cae-8b3b-a8f6019f3302 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.476112] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6159695e-780f-4fa7-8a2d-69cd5edc4037 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.513943] env[63345]: DEBUG nova.objects.base [None req-b9882f35-89e6-4dc7-8f48-84eee80f3558 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Object Instance<11652422-9136-4453-b932-06695f9bc910> lazy-loaded attributes: info_cache,migration_context {{(pid=63345) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 865.516044] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32211332-4563-4a94-b4ae-7af2f22a315f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.524352] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c1dba68-e4b4-44b7-91e8-7061bc1ef6ee {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.537374] env[63345]: DEBUG oslo_vmware.api [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017299, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.554984] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abb11dfd-dedd-4b62-b6c7-b5c61febece2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.559833] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5368a41b-5f1e-4653-b575-7f27576715bd {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.570246] env[63345]: DEBUG oslo_vmware.api [None req-b9882f35-89e6-4dc7-8f48-84eee80f3558 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Waiting for the task: (returnval){ [ 865.570246] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52974d95-fd73-9f4b-27b6-168740626d9d" [ 865.570246] env[63345]: _type = "Task" [ 865.570246] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.577319] env[63345]: DEBUG nova.compute.provider_tree [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 865.588779] env[63345]: DEBUG oslo_vmware.api [None req-b9882f35-89e6-4dc7-8f48-84eee80f3558 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52974d95-fd73-9f4b-27b6-168740626d9d, 'name': SearchDatastore_Task, 'duration_secs': 0.009825} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.589897] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b9882f35-89e6-4dc7-8f48-84eee80f3558 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 865.655160] env[63345]: DEBUG nova.network.neutron [None req-d8193250-7d50-454f-ac0d-40b80747489c tempest-ServersAdminTestJSON-558260828 tempest-ServersAdminTestJSON-558260828-project-admin] [instance: 75fc8365-bf8d-489e-935f-a5169c6a7e62] Updating instance_info_cache with network_info: [{"id": "2bb8e7ea-091a-4a60-9a2b-e9b196790b55", "address": "fa:16:3e:25:6e:9d", "network": {"id": "ce89b46a-97ec-4f2d-be39-333e9fcf307d", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-416012078-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2b389a73e7804452b23d8c00bedd0362", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bd3c6b64-aba2-4bdc-a693-3b4dff3ed861", "external-id": "nsx-vlan-transportzone-600", "segmentation_id": 600, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2bb8e7ea-09", "ovs_interfaceid": "2bb8e7ea-091a-4a60-9a2b-e9b196790b55", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 865.981655] env[63345]: DEBUG nova.network.neutron [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Successfully updated port: 9e054cb2-eb47-4dd3-8ec7-d8205d577337 {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 866.029476] env[63345]: DEBUG oslo_vmware.api [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] 
Task: {'id': task-1017299, 'name': CreateSnapshot_Task, 'duration_secs': 0.889047} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.029476] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Created Snapshot of the VM instance {{(pid=63345) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 866.029476] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e114175-abe8-45c1-adc3-bf555b6365d7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.083622] env[63345]: DEBUG nova.scheduler.client.report [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 866.095521] env[63345]: DEBUG oslo_concurrency.lockutils [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Acquiring lock "27e2cb12-d251-434a-b79e-6fbda80d3637" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 866.095928] env[63345]: DEBUG oslo_concurrency.lockutils [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Lock "27e2cb12-d251-434a-b79e-6fbda80d3637" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 866.161183] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d8193250-7d50-454f-ac0d-40b80747489c tempest-ServersAdminTestJSON-558260828 tempest-ServersAdminTestJSON-558260828-project-admin] Releasing lock "refresh_cache-75fc8365-bf8d-489e-935f-a5169c6a7e62" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 866.161183] env[63345]: DEBUG nova.compute.manager [None req-d8193250-7d50-454f-ac0d-40b80747489c tempest-ServersAdminTestJSON-558260828 tempest-ServersAdminTestJSON-558260828-project-admin] [instance: 75fc8365-bf8d-489e-935f-a5169c6a7e62] Inject network info {{(pid=63345) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7548}} [ 866.161183] env[63345]: DEBUG nova.compute.manager [None req-d8193250-7d50-454f-ac0d-40b80747489c tempest-ServersAdminTestJSON-558260828 tempest-ServersAdminTestJSON-558260828-project-admin] [instance: 
75fc8365-bf8d-489e-935f-a5169c6a7e62] network_info to inject: |[{"id": "2bb8e7ea-091a-4a60-9a2b-e9b196790b55", "address": "fa:16:3e:25:6e:9d", "network": {"id": "ce89b46a-97ec-4f2d-be39-333e9fcf307d", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-416012078-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2b389a73e7804452b23d8c00bedd0362", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bd3c6b64-aba2-4bdc-a693-3b4dff3ed861", "external-id": "nsx-vlan-transportzone-600", "segmentation_id": 600, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2bb8e7ea-09", "ovs_interfaceid": "2bb8e7ea-091a-4a60-9a2b-e9b196790b55", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7549}} [ 866.165201] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-d8193250-7d50-454f-ac0d-40b80747489c tempest-ServersAdminTestJSON-558260828 tempest-ServersAdminTestJSON-558260828-project-admin] [instance: 75fc8365-bf8d-489e-935f-a5169c6a7e62] Reconfiguring VM instance to set the machine id {{(pid=63345) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1802}} [ 866.166796] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-084876ae-656d-44e0-be3d-97cdc8547b0e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.188160] env[63345]: DEBUG oslo_vmware.api [None req-d8193250-7d50-454f-ac0d-40b80747489c tempest-ServersAdminTestJSON-558260828 tempest-ServersAdminTestJSON-558260828-project-admin] Waiting for the task: (returnval){ [ 866.188160] env[63345]: value = "task-1017300" [ 866.188160] env[63345]: _type = "Task" [ 866.188160] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.197645] env[63345]: DEBUG oslo_vmware.api [None req-d8193250-7d50-454f-ac0d-40b80747489c tempest-ServersAdminTestJSON-558260828 tempest-ServersAdminTestJSON-558260828-project-admin] Task: {'id': task-1017300, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.379246] env[63345]: DEBUG nova.compute.manager [req-dd7ca9f2-eefe-4102-acd1-c2adcb9c61e2 req-58b66ff5-5d20-4d89-b62d-98203906acc1 service nova] [instance: 5e20b33c-1481-4bd3-b269-29a70cc3150d] Received event network-changed-77f9ccf5-22c3-4c4e-8a43-15543133f465 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 866.379246] env[63345]: DEBUG nova.compute.manager [req-dd7ca9f2-eefe-4102-acd1-c2adcb9c61e2 req-58b66ff5-5d20-4d89-b62d-98203906acc1 service nova] [instance: 5e20b33c-1481-4bd3-b269-29a70cc3150d] Refreshing instance network info cache due to event network-changed-77f9ccf5-22c3-4c4e-8a43-15543133f465. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 866.379246] env[63345]: DEBUG oslo_concurrency.lockutils [req-dd7ca9f2-eefe-4102-acd1-c2adcb9c61e2 req-58b66ff5-5d20-4d89-b62d-98203906acc1 service nova] Acquiring lock "refresh_cache-5e20b33c-1481-4bd3-b269-29a70cc3150d" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 866.379246] env[63345]: DEBUG oslo_concurrency.lockutils [req-dd7ca9f2-eefe-4102-acd1-c2adcb9c61e2 req-58b66ff5-5d20-4d89-b62d-98203906acc1 service nova] Acquired lock "refresh_cache-5e20b33c-1481-4bd3-b269-29a70cc3150d" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 866.379582] env[63345]: DEBUG nova.network.neutron [req-dd7ca9f2-eefe-4102-acd1-c2adcb9c61e2 req-58b66ff5-5d20-4d89-b62d-98203906acc1 service nova] [instance: 5e20b33c-1481-4bd3-b269-29a70cc3150d] Refreshing network info cache for port 77f9ccf5-22c3-4c4e-8a43-15543133f465 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 866.487451] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquiring lock "refresh_cache-0da64b45-fa00-4fe8-8d1d-df586f27743f" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 866.487673] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquired lock "refresh_cache-0da64b45-fa00-4fe8-8d1d-df586f27743f" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 866.487775] env[63345]: DEBUG nova.network.neutron [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 866.514137] env[63345]: DEBUG nova.compute.manager [req-98b13e92-7a6c-4a88-97d5-01e95c5ac2b5 req-e1aa36f6-3561-410f-8de5-2ecbe61d151b service nova] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Received event network-vif-plugged-9e054cb2-eb47-4dd3-8ec7-d8205d577337 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 866.514335] env[63345]: DEBUG oslo_concurrency.lockutils [req-98b13e92-7a6c-4a88-97d5-01e95c5ac2b5 req-e1aa36f6-3561-410f-8de5-2ecbe61d151b service nova] Acquiring lock "0da64b45-fa00-4fe8-8d1d-df586f27743f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 866.514592] env[63345]: DEBUG oslo_concurrency.lockutils [req-98b13e92-7a6c-4a88-97d5-01e95c5ac2b5 req-e1aa36f6-3561-410f-8de5-2ecbe61d151b service nova] Lock "0da64b45-fa00-4fe8-8d1d-df586f27743f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 866.514794] env[63345]: DEBUG oslo_concurrency.lockutils [req-98b13e92-7a6c-4a88-97d5-01e95c5ac2b5 req-e1aa36f6-3561-410f-8de5-2ecbe61d151b 
service nova] Lock "0da64b45-fa00-4fe8-8d1d-df586f27743f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 866.515044] env[63345]: DEBUG nova.compute.manager [req-98b13e92-7a6c-4a88-97d5-01e95c5ac2b5 req-e1aa36f6-3561-410f-8de5-2ecbe61d151b service nova] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] No waiting events found dispatching network-vif-plugged-9e054cb2-eb47-4dd3-8ec7-d8205d577337 {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 866.515292] env[63345]: WARNING nova.compute.manager [req-98b13e92-7a6c-4a88-97d5-01e95c5ac2b5 req-e1aa36f6-3561-410f-8de5-2ecbe61d151b service nova] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Received unexpected event network-vif-plugged-9e054cb2-eb47-4dd3-8ec7-d8205d577337 for instance with vm_state building and task_state spawning. [ 866.550492] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Creating linked-clone VM from snapshot {{(pid=63345) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 866.551281] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-46f10bd9-d65a-4c73-baac-905d24490b27 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.561164] env[63345]: DEBUG oslo_vmware.api [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Waiting for the task: (returnval){ [ 866.561164] env[63345]: value = "task-1017301" [ 866.561164] env[63345]: _type = "Task" [ 866.561164] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.571964] env[63345]: DEBUG oslo_vmware.api [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017301, 'name': CloneVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.596654] env[63345]: DEBUG oslo_concurrency.lockutils [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.582s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 866.598788] env[63345]: DEBUG nova.compute.manager [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: 14198777-9091-4c69-8928-c83135acc7d2] Start building networks asynchronously for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 866.601977] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b8082a40-6221-4889-b6c8-039a804e534a tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.094s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 866.602451] env[63345]: DEBUG nova.objects.instance [None req-b8082a40-6221-4889-b6c8-039a804e534a tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Lazy-loading 'resources' on Instance uuid 4a59b565-571f-48ef-97bd-bed9853e2d8e {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 866.610021] env[63345]: DEBUG nova.compute.manager [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 27e2cb12-d251-434a-b79e-6fbda80d3637] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 866.704372] env[63345]: DEBUG oslo_vmware.api [None req-d8193250-7d50-454f-ac0d-40b80747489c tempest-ServersAdminTestJSON-558260828 tempest-ServersAdminTestJSON-558260828-project-admin] Task: {'id': task-1017300, 'name': ReconfigVM_Task, 'duration_secs': 0.248261} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.705184] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-d8193250-7d50-454f-ac0d-40b80747489c tempest-ServersAdminTestJSON-558260828 tempest-ServersAdminTestJSON-558260828-project-admin] [instance: 75fc8365-bf8d-489e-935f-a5169c6a7e62] Reconfigured VM instance to set the machine id {{(pid=63345) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1805}} [ 866.954614] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Acquiring lock "017a06b3-cc1a-4822-a07f-ca881fd4254b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 866.954888] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Lock "017a06b3-cc1a-4822-a07f-ca881fd4254b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 867.029878] env[63345]: DEBUG nova.network.neutron [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 867.074987] env[63345]: DEBUG oslo_vmware.api [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017301, 'name': CloneVM_Task} progress is 94%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.122106] env[63345]: DEBUG nova.compute.utils [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 867.126444] env[63345]: DEBUG nova.compute.manager [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: 14198777-9091-4c69-8928-c83135acc7d2] Allocating IP information in the background. {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 867.126637] env[63345]: DEBUG nova.network.neutron [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: 14198777-9091-4c69-8928-c83135acc7d2] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 867.146864] env[63345]: DEBUG oslo_concurrency.lockutils [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 867.179885] env[63345]: DEBUG nova.policy [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '56441dfd40fa467da376ce828d48f331', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'be4b8982dd144c969cb530f52ed9297b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 867.266370] env[63345]: DEBUG nova.network.neutron [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Updating instance_info_cache with network_info: [{"id": "9e054cb2-eb47-4dd3-8ec7-d8205d577337", "address": "fa:16:3e:80:1f:9f", "network": {"id": "b360ab0d-3deb-4632-a8d5-c1639db9e9e2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2015660260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": 
{"injected": false, "tenant_id": "33c28bfca4da460e8ca96dc7519204c8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f35e69ef-c2c8-4b8c-9887-33e97b242c0a", "external-id": "nsx-vlan-transportzone-969", "segmentation_id": 969, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e054cb2-eb", "ovs_interfaceid": "9e054cb2-eb47-4dd3-8ec7-d8205d577337", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 867.273577] env[63345]: DEBUG nova.network.neutron [req-dd7ca9f2-eefe-4102-acd1-c2adcb9c61e2 req-58b66ff5-5d20-4d89-b62d-98203906acc1 service nova] [instance: 5e20b33c-1481-4bd3-b269-29a70cc3150d] Updated VIF entry in instance network info cache for port 77f9ccf5-22c3-4c4e-8a43-15543133f465. {{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 867.273953] env[63345]: DEBUG nova.network.neutron [req-dd7ca9f2-eefe-4102-acd1-c2adcb9c61e2 req-58b66ff5-5d20-4d89-b62d-98203906acc1 service nova] [instance: 5e20b33c-1481-4bd3-b269-29a70cc3150d] Updating instance_info_cache with network_info: [{"id": "77f9ccf5-22c3-4c4e-8a43-15543133f465", "address": "fa:16:3e:5c:55:5e", "network": {"id": "372a3368-2d7a-4380-b811-7ad477d85250", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-454648225-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.178", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "41afa63287424a549133615eb390bac7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b8af79a-31d5-4d78-93d7-3919aa1d9186", "external-id": "nsx-vlan-transportzone-324", "segmentation_id": 324, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap77f9ccf5-22", "ovs_interfaceid": "77f9ccf5-22c3-4c4e-8a43-15543133f465", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 867.458953] env[63345]: DEBUG nova.compute.manager [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] [instance: 017a06b3-cc1a-4822-a07f-ca881fd4254b] Starting instance... 
{{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 867.556749] env[63345]: DEBUG nova.network.neutron [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: 14198777-9091-4c69-8928-c83135acc7d2] Successfully created port: 5a1f46e9-1557-425d-9dc3-c11b0fcf3f0d {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 867.575798] env[63345]: DEBUG oslo_vmware.api [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017301, 'name': CloneVM_Task} progress is 94%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.594052] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cbcfc19-ecb8-4cdf-b3af-c624f712c962 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.604326] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62d44799-d4ac-45f7-b37b-d01fd38e4977 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.635156] env[63345]: DEBUG nova.compute.manager [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: 14198777-9091-4c69-8928-c83135acc7d2] Start building block device mappings for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 867.638621] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58a7e9a1-086f-4bdc-9881-c410b4ffb71f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.651418] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4858aa9d-afef-4c2e-8d29-df21c4bbcceb {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.666708] env[63345]: DEBUG nova.compute.provider_tree [None req-b8082a40-6221-4889-b6c8-039a804e534a tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 867.769295] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Releasing lock "refresh_cache-0da64b45-fa00-4fe8-8d1d-df586f27743f" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 867.769669] env[63345]: DEBUG nova.compute.manager [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Instance network_info: |[{"id": "9e054cb2-eb47-4dd3-8ec7-d8205d577337", "address": "fa:16:3e:80:1f:9f", "network": {"id": "b360ab0d-3deb-4632-a8d5-c1639db9e9e2", "bridge": "br-int", "label": 
"tempest-AttachInterfacesTestJSON-2015660260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33c28bfca4da460e8ca96dc7519204c8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f35e69ef-c2c8-4b8c-9887-33e97b242c0a", "external-id": "nsx-vlan-transportzone-969", "segmentation_id": 969, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e054cb2-eb", "ovs_interfaceid": "9e054cb2-eb47-4dd3-8ec7-d8205d577337", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 867.770137] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:80:1f:9f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f35e69ef-c2c8-4b8c-9887-33e97b242c0a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9e054cb2-eb47-4dd3-8ec7-d8205d577337', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 867.778276] env[63345]: DEBUG oslo.service.loopingcall [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 867.778822] env[63345]: DEBUG oslo_concurrency.lockutils [req-dd7ca9f2-eefe-4102-acd1-c2adcb9c61e2 req-58b66ff5-5d20-4d89-b62d-98203906acc1 service nova] Releasing lock "refresh_cache-5e20b33c-1481-4bd3-b269-29a70cc3150d" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 867.779272] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 867.779568] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1430415c-a544-4a0e-b788-ac3a4b30d5e2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.800808] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 867.800808] env[63345]: value = "task-1017302" [ 867.800808] env[63345]: _type = "Task" [ 867.800808] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.810086] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017302, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.990912] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 868.076445] env[63345]: DEBUG oslo_vmware.api [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017301, 'name': CloneVM_Task} progress is 94%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.170086] env[63345]: DEBUG nova.scheduler.client.report [None req-b8082a40-6221-4889-b6c8-039a804e534a tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 868.313824] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017302, 'name': CreateVM_Task, 'duration_secs': 0.413852} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.314110] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 868.315014] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 868.315261] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 868.315686] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 868.316023] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c02cdd99-6fc4-4131-a1b9-0b62d53ed178 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.322686] env[63345]: DEBUG oslo_vmware.api [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Waiting for the task: (returnval){ [ 868.322686] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52ec6c68-9051-dd20-2bb9-ef379a480513" [ 868.322686] env[63345]: _type = "Task" [ 868.322686] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.330870] env[63345]: DEBUG oslo_vmware.api [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52ec6c68-9051-dd20-2bb9-ef379a480513, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.511777] env[63345]: INFO nova.compute.manager [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Rebuilding instance [ 868.555263] env[63345]: DEBUG nova.compute.manager [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 868.556557] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-498da586-dc1f-4fd0-95e0-243fa759a729 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.575470] env[63345]: DEBUG oslo_vmware.api [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017301, 'name': CloneVM_Task, 'duration_secs': 1.846483} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.575752] env[63345]: INFO nova.virt.vmwareapi.vmops [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Created linked-clone VM from snapshot [ 868.576596] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61b9d321-2815-4a66-9035-94ea717882f8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.585131] env[63345]: DEBUG nova.virt.vmwareapi.images [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Uploading image 487d34b5-b85c-4d35-8b15-fd7347b3dcfd {{(pid=63345) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 868.591991] env[63345]: DEBUG nova.compute.manager [req-b7f9463d-4bd6-44e7-b4b6-7a8c19aa8727 req-dc79bb69-74aa-4c2d-b61c-dbce82f7715c service nova] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Received event network-changed-9e054cb2-eb47-4dd3-8ec7-d8205d577337 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 868.592178] env[63345]: DEBUG nova.compute.manager [req-b7f9463d-4bd6-44e7-b4b6-7a8c19aa8727 req-dc79bb69-74aa-4c2d-b61c-dbce82f7715c service nova] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Refreshing instance network info cache due to event network-changed-9e054cb2-eb47-4dd3-8ec7-d8205d577337. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 868.592440] env[63345]: DEBUG oslo_concurrency.lockutils [req-b7f9463d-4bd6-44e7-b4b6-7a8c19aa8727 req-dc79bb69-74aa-4c2d-b61c-dbce82f7715c service nova] Acquiring lock "refresh_cache-0da64b45-fa00-4fe8-8d1d-df586f27743f" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 868.592597] env[63345]: DEBUG oslo_concurrency.lockutils [req-b7f9463d-4bd6-44e7-b4b6-7a8c19aa8727 req-dc79bb69-74aa-4c2d-b61c-dbce82f7715c service nova] Acquired lock "refresh_cache-0da64b45-fa00-4fe8-8d1d-df586f27743f" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 868.592765] env[63345]: DEBUG nova.network.neutron [req-b7f9463d-4bd6-44e7-b4b6-7a8c19aa8727 req-dc79bb69-74aa-4c2d-b61c-dbce82f7715c service nova] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Refreshing network info cache for port 9e054cb2-eb47-4dd3-8ec7-d8205d577337 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 868.610681] env[63345]: DEBUG oslo_vmware.rw_handles [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 868.610681] env[63345]: value = "vm-226079" [ 868.610681] env[63345]: _type = "VirtualMachine" [ 868.610681] env[63345]: }. {{(pid=63345) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 868.611283] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-fb2c54fb-99cd-4414-8a00-9f1b7674862a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.619321] env[63345]: DEBUG oslo_vmware.rw_handles [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Lease: (returnval){ [ 868.619321] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]5242a778-2452-5473-096d-211d198ec2cd" [ 868.619321] env[63345]: _type = "HttpNfcLease" [ 868.619321] env[63345]: } obtained for exporting VM: (result){ [ 868.619321] env[63345]: value = "vm-226079" [ 868.619321] env[63345]: _type = "VirtualMachine" [ 868.619321] env[63345]: }. {{(pid=63345) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 868.619614] env[63345]: DEBUG oslo_vmware.api [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Waiting for the lease: (returnval){ [ 868.619614] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]5242a778-2452-5473-096d-211d198ec2cd" [ 868.619614] env[63345]: _type = "HttpNfcLease" [ 868.619614] env[63345]: } to be ready. {{(pid=63345) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 868.636252] env[63345]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 868.636252] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]5242a778-2452-5473-096d-211d198ec2cd" [ 868.636252] env[63345]: _type = "HttpNfcLease" [ 868.636252] env[63345]: } is initializing. 
{{(pid=63345) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 868.648908] env[63345]: DEBUG nova.compute.manager [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: 14198777-9091-4c69-8928-c83135acc7d2] Start spawning the instance on the hypervisor. {{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 868.674546] env[63345]: DEBUG nova.virt.hardware [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 868.674546] env[63345]: DEBUG nova.virt.hardware [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 868.674546] env[63345]: DEBUG nova.virt.hardware [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 868.674546] env[63345]: DEBUG nova.virt.hardware [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 868.674546] env[63345]: DEBUG nova.virt.hardware [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 868.674546] env[63345]: DEBUG nova.virt.hardware [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 868.674546] env[63345]: DEBUG nova.virt.hardware [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 868.674546] env[63345]: DEBUG nova.virt.hardware [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 868.674546] env[63345]: DEBUG nova.virt.hardware [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 868.674546] env[63345]: DEBUG nova.virt.hardware [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 868.674879] env[63345]: DEBUG nova.virt.hardware [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 868.675807] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b8082a40-6221-4889-b6c8-039a804e534a tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.074s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 868.678198] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d30d634-0db9-44e7-818d-3aa551fbd3c4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.683506] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dfd9c36c-e449-49a3-9575-a84cd803ef8e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.505s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 868.683506] env[63345]: DEBUG nova.objects.instance [None req-dfd9c36c-e449-49a3-9575-a84cd803ef8e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Lazy-loading 'resources' on Instance uuid f37b4a95-0725-4a84-b726-fd4f26e87020 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 868.689276] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4268150b-76d0-4e12-b418-2d1a9d25f075 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.708837] env[63345]: INFO nova.scheduler.client.report [None req-b8082a40-6221-4889-b6c8-039a804e534a tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Deleted allocations for instance 4a59b565-571f-48ef-97bd-bed9853e2d8e [ 868.834999] env[63345]: DEBUG oslo_vmware.api [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 
tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52ec6c68-9051-dd20-2bb9-ef379a480513, 'name': SearchDatastore_Task, 'duration_secs': 0.016278} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.834999] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 868.834999] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 868.835286] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 868.835286] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 868.836020] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 868.836020] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a4a4aea9-39fe-42c9-815a-0dbf699794e2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.850087] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 868.850296] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 868.851054] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-27f28e1c-ba18-4fe9-829b-ad0b0fac829b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.857181] env[63345]: DEBUG oslo_vmware.api [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Waiting for the task: (returnval){ [ 868.857181] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52c881a0-d9ca-e4b5-d08d-658978a58163" [ 868.857181] env[63345]: _type = "Task" [ 868.857181] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.867442] env[63345]: DEBUG oslo_vmware.api [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52c881a0-d9ca-e4b5-d08d-658978a58163, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.973963] env[63345]: DEBUG nova.compute.manager [req-a15b71ac-db99-4565-a8a8-cb2918735a93 req-161fe166-444b-4d69-810c-256645729a2b service nova] [instance: 14198777-9091-4c69-8928-c83135acc7d2] Received event network-vif-plugged-5a1f46e9-1557-425d-9dc3-c11b0fcf3f0d {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 868.974252] env[63345]: DEBUG oslo_concurrency.lockutils [req-a15b71ac-db99-4565-a8a8-cb2918735a93 req-161fe166-444b-4d69-810c-256645729a2b service nova] Acquiring lock "14198777-9091-4c69-8928-c83135acc7d2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 868.974532] env[63345]: DEBUG oslo_concurrency.lockutils [req-a15b71ac-db99-4565-a8a8-cb2918735a93 req-161fe166-444b-4d69-810c-256645729a2b service nova] Lock "14198777-9091-4c69-8928-c83135acc7d2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 868.974714] env[63345]: DEBUG oslo_concurrency.lockutils [req-a15b71ac-db99-4565-a8a8-cb2918735a93 req-161fe166-444b-4d69-810c-256645729a2b service nova] Lock "14198777-9091-4c69-8928-c83135acc7d2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 868.974889] env[63345]: DEBUG nova.compute.manager [req-a15b71ac-db99-4565-a8a8-cb2918735a93 req-161fe166-444b-4d69-810c-256645729a2b service nova] [instance: 14198777-9091-4c69-8928-c83135acc7d2] No waiting events found dispatching network-vif-plugged-5a1f46e9-1557-425d-9dc3-c11b0fcf3f0d {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 868.977979] env[63345]: WARNING nova.compute.manager [req-a15b71ac-db99-4565-a8a8-cb2918735a93 req-161fe166-444b-4d69-810c-256645729a2b service nova] [instance: 14198777-9091-4c69-8928-c83135acc7d2] Received unexpected event 
network-vif-plugged-5a1f46e9-1557-425d-9dc3-c11b0fcf3f0d for instance with vm_state building and task_state spawning. [ 869.065560] env[63345]: DEBUG nova.network.neutron [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: 14198777-9091-4c69-8928-c83135acc7d2] Successfully updated port: 5a1f46e9-1557-425d-9dc3-c11b0fcf3f0d {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 869.130036] env[63345]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 869.130036] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]5242a778-2452-5473-096d-211d198ec2cd" [ 869.130036] env[63345]: _type = "HttpNfcLease" [ 869.130036] env[63345]: } is ready. {{(pid=63345) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 869.130381] env[63345]: DEBUG oslo_vmware.rw_handles [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 869.130381] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]5242a778-2452-5473-096d-211d198ec2cd" [ 869.130381] env[63345]: _type = "HttpNfcLease" [ 869.130381] env[63345]: }. {{(pid=63345) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 869.131013] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59abde5a-459b-4c48-bac8-bfd2271d7a33 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.141405] env[63345]: DEBUG oslo_vmware.rw_handles [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ce0b59-aae8-839b-3975-7ad6fdc485f6/disk-0.vmdk from lease info. {{(pid=63345) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 869.141405] env[63345]: DEBUG oslo_vmware.rw_handles [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ce0b59-aae8-839b-3975-7ad6fdc485f6/disk-0.vmdk for reading. 
{{(pid=63345) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 869.220796] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b8082a40-6221-4889-b6c8-039a804e534a tempest-SecurityGroupsTestJSON-257927185 tempest-SecurityGroupsTestJSON-257927185-project-member] Lock "4a59b565-571f-48ef-97bd-bed9853e2d8e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.503s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 869.294989] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 869.295230] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 869.299417] env[63345]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-cf45ad16-210c-4a9d-9fa1-317878344a7d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.370979] env[63345]: DEBUG oslo_vmware.api [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52c881a0-d9ca-e4b5-d08d-658978a58163, 'name': SearchDatastore_Task, 'duration_secs': 0.022431} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.371913] env[63345]: DEBUG nova.network.neutron [req-b7f9463d-4bd6-44e7-b4b6-7a8c19aa8727 req-dc79bb69-74aa-4c2d-b61c-dbce82f7715c service nova] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Updated VIF entry in instance network info cache for port 9e054cb2-eb47-4dd3-8ec7-d8205d577337. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 869.372264] env[63345]: DEBUG nova.network.neutron [req-b7f9463d-4bd6-44e7-b4b6-7a8c19aa8727 req-dc79bb69-74aa-4c2d-b61c-dbce82f7715c service nova] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Updating instance_info_cache with network_info: [{"id": "9e054cb2-eb47-4dd3-8ec7-d8205d577337", "address": "fa:16:3e:80:1f:9f", "network": {"id": "b360ab0d-3deb-4632-a8d5-c1639db9e9e2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2015660260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33c28bfca4da460e8ca96dc7519204c8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f35e69ef-c2c8-4b8c-9887-33e97b242c0a", "external-id": "nsx-vlan-transportzone-969", "segmentation_id": 969, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e054cb2-eb", "ovs_interfaceid": "9e054cb2-eb47-4dd3-8ec7-d8205d577337", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 869.374161] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-65fe0bb3-ce97-4203-9e13-6ce445356fd4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.383158] env[63345]: DEBUG oslo_vmware.api [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Waiting for the task: (returnval){ [ 869.383158] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52759aab-a95e-612a-f3bd-08be25aafd67" [ 869.383158] env[63345]: _type = "Task" [ 869.383158] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.393512] env[63345]: DEBUG oslo_vmware.api [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52759aab-a95e-612a-f3bd-08be25aafd67, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.564033] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b25ce1e2-3a5b-4608-89e0-7df15bdf7548 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.568731] env[63345]: DEBUG oslo_vmware.rw_handles [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523b5f42-a0a3-36aa-332a-57d34a1a5a9c/disk-0.vmdk. 
{{(pid=63345) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 869.568906] env[63345]: DEBUG oslo_concurrency.lockutils [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Acquiring lock "refresh_cache-14198777-9091-4c69-8928-c83135acc7d2" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 869.569159] env[63345]: DEBUG oslo_concurrency.lockutils [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Acquired lock "refresh_cache-14198777-9091-4c69-8928-c83135acc7d2" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 869.569235] env[63345]: DEBUG nova.network.neutron [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: 14198777-9091-4c69-8928-c83135acc7d2] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 869.571986] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b3a117f-089d-4a2a-86fd-9ae08a9cc036 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.574737] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 869.574993] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c8b8f8d7-46ef-42e8-8487-f0781ead2f10 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.582228] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-751a84d6-2b8c-4eee-badb-44bb62dcff49 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.585776] env[63345]: DEBUG oslo_vmware.rw_handles [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523b5f42-a0a3-36aa-332a-57d34a1a5a9c/disk-0.vmdk is in state: ready. {{(pid=63345) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 869.585888] env[63345]: ERROR oslo_vmware.rw_handles [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523b5f42-a0a3-36aa-332a-57d34a1a5a9c/disk-0.vmdk due to incomplete transfer. 
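The records above and below follow a single recurring pattern: a VMware task or lease is created, the caller logs "Waiting for the task ... to complete", progress is polled ("progress is 0%", "progress is 94%"), and a final record reports completion with a duration. The sketch below is only an illustrative approximation of that poll-until-done loop; it is self-contained Python, and the names (FakeTask, wait_for_task, poll) are hypothetical stand-ins, not the oslo.vmware API or the Nova driver code referenced in these logs.

import time


class FakeTask:
    """Hypothetical stand-in for a long-running backend task."""

    def __init__(self, task_id, steps=4):
        self.task_id = task_id
        self._progress = 0
        self._step = max(1, 100 // steps)

    def poll(self):
        # A real client would query the server here; this just advances locally.
        self._progress = min(100, self._progress + self._step)
        state = "success" if self._progress >= 100 else "running"
        return {"id": self.task_id, "progress": self._progress, "state": state}


def wait_for_task(task, interval=0.5):
    """Poll a task until it finishes, logging progress along the way."""
    started = time.monotonic()
    while True:
        info = task.poll()
        if info["state"] == "success":
            duration = time.monotonic() - started
            print(f"Task {info['id']} completed successfully in {duration:.3f}s.")
            return info
        print(f"Task {info['id']} progress is {info['progress']}%.")
        time.sleep(interval)


if __name__ == "__main__":
    wait_for_task(FakeTask("task-demo-1"), interval=0.1)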
[ 869.587095] env[63345]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-db2e2dbb-7f8e-4ad4-b6ee-9e0daf44a34f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.588580] env[63345]: DEBUG oslo_vmware.api [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Waiting for the task: (returnval){ [ 869.588580] env[63345]: value = "task-1017304" [ 869.588580] env[63345]: _type = "Task" [ 869.588580] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.624081] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b09bb012-c292-4ed5-8ea8-29b2753f47ec {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.626891] env[63345]: DEBUG oslo_vmware.rw_handles [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523b5f42-a0a3-36aa-332a-57d34a1a5a9c/disk-0.vmdk. {{(pid=63345) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 869.627108] env[63345]: DEBUG nova.virt.vmwareapi.images [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Uploaded image 163b8ba5-dbfa-4890-b990-7e227e0ccf91 to the Glance image server {{(pid=63345) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:473}} [ 869.629658] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Destroying the VM {{(pid=63345) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 869.633447] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-9c263f73-4047-42d1-b60d-d3350df30f2a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.635265] env[63345]: DEBUG oslo_vmware.api [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017304, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.642402] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ee4b79a-6952-43dc-92da-a8a37d9e7b61 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.646815] env[63345]: DEBUG oslo_vmware.api [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Waiting for the task: (returnval){ [ 869.646815] env[63345]: value = "task-1017305" [ 869.646815] env[63345]: _type = "Task" [ 869.646815] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.660434] env[63345]: DEBUG nova.compute.provider_tree [None req-dfd9c36c-e449-49a3-9575-a84cd803ef8e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 869.667865] env[63345]: DEBUG oslo_vmware.api [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1017305, 'name': Destroy_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.806076] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 869.806853] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Starting heal instance info cache {{(pid=63345) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10257}} [ 869.875519] env[63345]: DEBUG oslo_concurrency.lockutils [req-b7f9463d-4bd6-44e7-b4b6-7a8c19aa8727 req-dc79bb69-74aa-4c2d-b61c-dbce82f7715c service nova] Releasing lock "refresh_cache-0da64b45-fa00-4fe8-8d1d-df586f27743f" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 869.896086] env[63345]: DEBUG oslo_vmware.api [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52759aab-a95e-612a-f3bd-08be25aafd67, 'name': SearchDatastore_Task, 'duration_secs': 0.015731} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.896537] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 869.896924] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 0da64b45-fa00-4fe8-8d1d-df586f27743f/0da64b45-fa00-4fe8-8d1d-df586f27743f.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 869.897344] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cd2dd419-1d0f-47ba-8174-af4a2647dbfd {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.906191] env[63345]: DEBUG oslo_vmware.api [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Waiting for the task: (returnval){ [ 869.906191] env[63345]: value = "task-1017306" [ 869.906191] env[63345]: _type = "Task" [ 869.906191] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.916677] env[63345]: DEBUG oslo_vmware.api [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017306, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.102475] env[63345]: DEBUG oslo_vmware.api [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017304, 'name': PowerOffVM_Task, 'duration_secs': 0.332051} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.102954] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 870.103332] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 870.104284] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92a0a9f4-f935-4042-9c68-69ffea97b87b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.108130] env[63345]: DEBUG nova.network.neutron [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: 14198777-9091-4c69-8928-c83135acc7d2] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 870.117064] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 870.117064] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-58c10268-5896-47a4-a7cf-a96825088dd8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.161862] env[63345]: DEBUG oslo_vmware.api [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1017305, 'name': Destroy_Task} progress is 33%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.162880] env[63345]: DEBUG nova.scheduler.client.report [None req-dfd9c36c-e449-49a3-9575-a84cd803ef8e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 870.195987] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 870.196815] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Deleting contents of the VM from datastore datastore1 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 870.196815] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Deleting the datastore file [datastore1] 3a85df04-3997-48a3-8992-f24fe997b3cc {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 870.200261] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7b1e1e00-ef0a-4f9d-82e3-3c3db856438e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.208445] env[63345]: DEBUG oslo_vmware.api [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Waiting for the task: (returnval){ [ 870.208445] env[63345]: value = "task-1017308" [ 870.208445] env[63345]: _type = "Task" [ 870.208445] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.224592] env[63345]: DEBUG oslo_vmware.api [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017308, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.421189] env[63345]: DEBUG oslo_vmware.api [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017306, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.579097] env[63345]: DEBUG nova.network.neutron [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: 14198777-9091-4c69-8928-c83135acc7d2] Updating instance_info_cache with network_info: [{"id": "5a1f46e9-1557-425d-9dc3-c11b0fcf3f0d", "address": "fa:16:3e:c7:8d:6f", "network": {"id": "5159b9e8-dfb2-472c-bec6-f963867f9baf", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-134143484-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be4b8982dd144c969cb530f52ed9297b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31ac3fea-ebf4-4bed-bf70-1eaecdf71280", "external-id": "nsx-vlan-transportzone-489", "segmentation_id": 489, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a1f46e9-15", "ovs_interfaceid": "5a1f46e9-1557-425d-9dc3-c11b0fcf3f0d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 870.658336] env[63345]: DEBUG oslo_vmware.api [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1017305, 'name': Destroy_Task, 'duration_secs': 0.578243} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.658760] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Destroyed the VM [ 870.659185] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Deleting Snapshot of the VM instance {{(pid=63345) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 870.659561] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-2c4d5c6f-d701-48ee-97d8-5d3af5207ba1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.670072] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dfd9c36c-e449-49a3-9575-a84cd803ef8e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.988s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 870.672648] env[63345]: DEBUG oslo_vmware.api [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Waiting for the task: (returnval){ [ 870.672648] env[63345]: value = "task-1017309" [ 870.672648] env[63345]: _type = "Task" [ 870.672648] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.673596] env[63345]: DEBUG oslo_concurrency.lockutils [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.946s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 870.675787] env[63345]: INFO nova.compute.claims [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] [instance: ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 870.691532] env[63345]: DEBUG oslo_vmware.api [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1017309, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.715581] env[63345]: INFO nova.scheduler.client.report [None req-dfd9c36c-e449-49a3-9575-a84cd803ef8e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Deleted allocations for instance f37b4a95-0725-4a84-b726-fd4f26e87020 [ 870.723758] env[63345]: DEBUG oslo_vmware.api [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017308, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.368659} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.724463] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 870.724821] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Deleted contents of the VM from datastore datastore1 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 870.725216] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 870.920948] env[63345]: DEBUG oslo_vmware.api [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017306, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.767584} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.921449] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 0da64b45-fa00-4fe8-8d1d-df586f27743f/0da64b45-fa00-4fe8-8d1d-df586f27743f.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 870.922040] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 870.922637] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0a7361e3-ab65-42ca-bc84-53f0e46b1c9b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.938011] env[63345]: DEBUG oslo_vmware.api [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Waiting for the task: (returnval){ [ 870.938011] env[63345]: value = "task-1017310" [ 870.938011] env[63345]: _type = "Task" [ 870.938011] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.952966] env[63345]: DEBUG oslo_vmware.api [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017310, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.019218] env[63345]: DEBUG nova.compute.manager [req-5a67aef5-ec65-49cd-8d09-8d71fe0bbfb2 req-c3947829-1c88-4d65-aaaf-c89f6ec7dd96 service nova] [instance: 14198777-9091-4c69-8928-c83135acc7d2] Received event network-changed-5a1f46e9-1557-425d-9dc3-c11b0fcf3f0d {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 871.019484] env[63345]: DEBUG nova.compute.manager [req-5a67aef5-ec65-49cd-8d09-8d71fe0bbfb2 req-c3947829-1c88-4d65-aaaf-c89f6ec7dd96 service nova] [instance: 14198777-9091-4c69-8928-c83135acc7d2] Refreshing instance network info cache due to event network-changed-5a1f46e9-1557-425d-9dc3-c11b0fcf3f0d. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 871.019818] env[63345]: DEBUG oslo_concurrency.lockutils [req-5a67aef5-ec65-49cd-8d09-8d71fe0bbfb2 req-c3947829-1c88-4d65-aaaf-c89f6ec7dd96 service nova] Acquiring lock "refresh_cache-14198777-9091-4c69-8928-c83135acc7d2" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 871.081872] env[63345]: DEBUG oslo_concurrency.lockutils [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Releasing lock "refresh_cache-14198777-9091-4c69-8928-c83135acc7d2" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 871.082766] env[63345]: DEBUG nova.compute.manager [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: 14198777-9091-4c69-8928-c83135acc7d2] Instance network_info: |[{"id": "5a1f46e9-1557-425d-9dc3-c11b0fcf3f0d", "address": "fa:16:3e:c7:8d:6f", "network": {"id": "5159b9e8-dfb2-472c-bec6-f963867f9baf", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-134143484-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be4b8982dd144c969cb530f52ed9297b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31ac3fea-ebf4-4bed-bf70-1eaecdf71280", "external-id": "nsx-vlan-transportzone-489", "segmentation_id": 489, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a1f46e9-15", "ovs_interfaceid": "5a1f46e9-1557-425d-9dc3-c11b0fcf3f0d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 871.083414] env[63345]: DEBUG oslo_concurrency.lockutils [req-5a67aef5-ec65-49cd-8d09-8d71fe0bbfb2 req-c3947829-1c88-4d65-aaaf-c89f6ec7dd96 service nova] Acquired lock "refresh_cache-14198777-9091-4c69-8928-c83135acc7d2" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 871.083631] env[63345]: DEBUG nova.network.neutron [req-5a67aef5-ec65-49cd-8d09-8d71fe0bbfb2 req-c3947829-1c88-4d65-aaaf-c89f6ec7dd96 service nova] [instance: 14198777-9091-4c69-8928-c83135acc7d2] Refreshing network info cache for port 5a1f46e9-1557-425d-9dc3-c11b0fcf3f0d {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 871.085269] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: 14198777-9091-4c69-8928-c83135acc7d2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c7:8d:6f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '31ac3fea-ebf4-4bed-bf70-1eaecdf71280', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'5a1f46e9-1557-425d-9dc3-c11b0fcf3f0d', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 871.094561] env[63345]: DEBUG oslo.service.loopingcall [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 871.096213] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 14198777-9091-4c69-8928-c83135acc7d2] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 871.096524] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-888cf355-cba0-47c1-8e7f-7ddede20d85b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.120128] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 871.120128] env[63345]: value = "task-1017311" [ 871.120128] env[63345]: _type = "Task" [ 871.120128] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.130619] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017311, 'name': CreateVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.196259] env[63345]: DEBUG oslo_vmware.api [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1017309, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.232023] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dfd9c36c-e449-49a3-9575-a84cd803ef8e tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Lock "f37b4a95-0725-4a84-b726-fd4f26e87020" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.565s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 871.317091] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Acquiring lock "refresh_cache-070a834d-6478-4705-8df0-2a27c8780507" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 871.318663] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Acquired lock "refresh_cache-070a834d-6478-4705-8df0-2a27c8780507" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 871.318663] env[63345]: DEBUG nova.network.neutron [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Forcefully refreshing network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2004}} [ 871.451152] env[63345]: DEBUG oslo_vmware.api [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017310, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081667} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.451152] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 871.452186] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fee82734-bee8-4aa2-b6f4-dd7a386f1f48 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.477583] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Reconfiguring VM instance instance-0000004e to attach disk [datastore2] 0da64b45-fa00-4fe8-8d1d-df586f27743f/0da64b45-fa00-4fe8-8d1d-df586f27743f.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 871.477861] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f2b755ac-74b2-4f29-a365-bc3c04fe552f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.501640] env[63345]: DEBUG oslo_vmware.api [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Waiting for the task: (returnval){ [ 871.501640] env[63345]: value = "task-1017312" [ 871.501640] env[63345]: _type = "Task" [ 871.501640] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.512595] env[63345]: DEBUG oslo_vmware.api [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017312, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.633619] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017311, 'name': CreateVM_Task} progress is 99%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.699598] env[63345]: DEBUG oslo_vmware.api [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1017309, 'name': RemoveSnapshot_Task} progress is 100%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.785218] env[63345]: DEBUG nova.virt.hardware [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 871.785788] env[63345]: DEBUG nova.virt.hardware [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 871.787051] env[63345]: DEBUG nova.virt.hardware [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 871.787051] env[63345]: DEBUG nova.virt.hardware [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 871.787051] env[63345]: DEBUG nova.virt.hardware [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 871.787051] env[63345]: DEBUG nova.virt.hardware [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 871.787051] env[63345]: DEBUG nova.virt.hardware [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 871.787581] env[63345]: DEBUG nova.virt.hardware [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 871.788016] env[63345]: DEBUG nova.virt.hardware [None 
req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 871.788416] env[63345]: DEBUG nova.virt.hardware [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 871.788735] env[63345]: DEBUG nova.virt.hardware [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 871.790497] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59289a13-54b4-44a0-a8c6-e0de4a66904b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.802948] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c4df9a1-5675-4e17-aa3f-713e2687c876 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.827063] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cf:c7:9e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bd3c6b64-aba2-4bdc-a693-3b4dff3ed861', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a44ad561-3547-45fd-a941-c72ff5211989', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 871.834430] env[63345]: DEBUG oslo.service.loopingcall [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 871.839463] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 871.840479] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5549c973-c2a4-4a2f-9819-bb321730479d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.867714] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 871.867714] env[63345]: value = "task-1017313" [ 871.867714] env[63345]: _type = "Task" [ 871.867714] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.883092] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017313, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.901205] env[63345]: DEBUG nova.network.neutron [req-5a67aef5-ec65-49cd-8d09-8d71fe0bbfb2 req-c3947829-1c88-4d65-aaaf-c89f6ec7dd96 service nova] [instance: 14198777-9091-4c69-8928-c83135acc7d2] Updated VIF entry in instance network info cache for port 5a1f46e9-1557-425d-9dc3-c11b0fcf3f0d. {{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 871.901886] env[63345]: DEBUG nova.network.neutron [req-5a67aef5-ec65-49cd-8d09-8d71fe0bbfb2 req-c3947829-1c88-4d65-aaaf-c89f6ec7dd96 service nova] [instance: 14198777-9091-4c69-8928-c83135acc7d2] Updating instance_info_cache with network_info: [{"id": "5a1f46e9-1557-425d-9dc3-c11b0fcf3f0d", "address": "fa:16:3e:c7:8d:6f", "network": {"id": "5159b9e8-dfb2-472c-bec6-f963867f9baf", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-134143484-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be4b8982dd144c969cb530f52ed9297b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31ac3fea-ebf4-4bed-bf70-1eaecdf71280", "external-id": "nsx-vlan-transportzone-489", "segmentation_id": 489, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a1f46e9-15", "ovs_interfaceid": "5a1f46e9-1557-425d-9dc3-c11b0fcf3f0d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 872.015177] env[63345]: DEBUG oslo_vmware.api [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017312, 'name': ReconfigVM_Task, 'duration_secs': 0.375694} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.015177] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Reconfigured VM instance instance-0000004e to attach disk [datastore2] 0da64b45-fa00-4fe8-8d1d-df586f27743f/0da64b45-fa00-4fe8-8d1d-df586f27743f.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 872.020408] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-95e4275e-9198-44e3-8843-8733f497061c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.029176] env[63345]: DEBUG oslo_vmware.api [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Waiting for the task: (returnval){ [ 872.029176] env[63345]: value = "task-1017314" [ 872.029176] env[63345]: _type = "Task" [ 872.029176] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.037277] env[63345]: DEBUG oslo_vmware.api [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017314, 'name': Rename_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.131951] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017311, 'name': CreateVM_Task, 'duration_secs': 0.520072} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.134973] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 14198777-9091-4c69-8928-c83135acc7d2] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 872.136115] env[63345]: DEBUG oslo_concurrency.lockutils [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 872.137290] env[63345]: DEBUG oslo_concurrency.lockutils [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 872.137290] env[63345]: DEBUG oslo_concurrency.lockutils [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 872.137290] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c87fdd2f-b022-4e46-a655-5d1c07aa65fe {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.144264] env[63345]: DEBUG oslo_vmware.api [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Waiting for the task: (returnval){ [ 872.144264] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52abc5cb-2b1c-6dd0-3d22-e1b6cbaf862f" [ 872.144264] env[63345]: _type = "Task" [ 872.144264] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.156013] env[63345]: DEBUG oslo_vmware.api [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52abc5cb-2b1c-6dd0-3d22-e1b6cbaf862f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.170423] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88bf0153-1580-4897-87d5-426fbab9d5a6 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.179862] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98477ba1-4e58-4d5f-aa1c-96af7f9fe2bf {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.194570] env[63345]: DEBUG oslo_vmware.api [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1017309, 'name': RemoveSnapshot_Task, 'duration_secs': 1.028554} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.221580] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Deleted Snapshot of the VM instance {{(pid=63345) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 872.221969] env[63345]: DEBUG nova.compute.manager [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 872.223516] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a22233ef-9183-48e4-b795-ffccaab907a1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.227286] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f18d4399-7cdf-487e-9929-47524be3673d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.239423] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe0b5ee1-19f1-43ad-9349-52f3271e4ac0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.258416] env[63345]: DEBUG nova.compute.provider_tree [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 872.383907] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017313, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.407838] env[63345]: DEBUG oslo_concurrency.lockutils [req-5a67aef5-ec65-49cd-8d09-8d71fe0bbfb2 req-c3947829-1c88-4d65-aaaf-c89f6ec7dd96 service nova] Releasing lock "refresh_cache-14198777-9091-4c69-8928-c83135acc7d2" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 872.538759] env[63345]: DEBUG oslo_vmware.api [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017314, 'name': Rename_Task, 'duration_secs': 0.191674} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.539019] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 872.539520] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8cb7d9d6-7066-45e1-8e74-f0479be9272e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.547621] env[63345]: DEBUG oslo_vmware.api [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Waiting for the task: (returnval){ [ 872.547621] env[63345]: value = "task-1017315" [ 872.547621] env[63345]: _type = "Task" [ 872.547621] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.559168] env[63345]: DEBUG oslo_vmware.api [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017315, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.658454] env[63345]: DEBUG oslo_vmware.api [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52abc5cb-2b1c-6dd0-3d22-e1b6cbaf862f, 'name': SearchDatastore_Task, 'duration_secs': 0.011244} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.658869] env[63345]: DEBUG oslo_concurrency.lockutils [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 872.659366] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: 14198777-9091-4c69-8928-c83135acc7d2] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 872.659495] env[63345]: DEBUG oslo_concurrency.lockutils [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 872.659696] env[63345]: DEBUG oslo_concurrency.lockutils [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 872.659901] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 872.660323] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-974b78cd-8022-4014-ad78-7d43b54d9f41 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.672528] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 872.672752] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 872.673530] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-56562a20-82e0-4ed4-9fdc-26263290ef17 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.679936] env[63345]: DEBUG oslo_vmware.api [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Waiting for the task: (returnval){ [ 872.679936] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]5244e5b5-8f30-cdac-7d72-c960af8ff21f" [ 872.679936] env[63345]: _type = "Task" [ 872.679936] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.688397] env[63345]: DEBUG oslo_vmware.api [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5244e5b5-8f30-cdac-7d72-c960af8ff21f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.752059] env[63345]: INFO nova.compute.manager [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Shelve offloading [ 872.757195] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquiring lock "c84c8b9a-9164-4dd7-b094-dd09c15c6f21" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 872.757462] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Lock "c84c8b9a-9164-4dd7-b094-dd09c15c6f21" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 872.762541] env[63345]: DEBUG nova.scheduler.client.report [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 872.884498] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017313, 'name': CreateVM_Task, 'duration_secs': 0.561343} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.885679] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 872.885679] env[63345]: DEBUG oslo_concurrency.lockutils [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 872.885679] env[63345]: DEBUG oslo_concurrency.lockutils [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 872.885812] env[63345]: DEBUG oslo_concurrency.lockutils [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 872.886132] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-70f3792d-7f10-4408-a284-0a895572c882 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.892418] env[63345]: DEBUG oslo_vmware.api [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Waiting for the task: (returnval){ [ 872.892418] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]523cc977-b997-93ee-e040-a2289f6f4b4c" [ 872.892418] env[63345]: _type = "Task" [ 872.892418] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.902160] env[63345]: DEBUG oslo_vmware.api [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]523cc977-b997-93ee-e040-a2289f6f4b4c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.061147] env[63345]: DEBUG oslo_vmware.api [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017315, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.086894] env[63345]: DEBUG nova.network.neutron [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Updating instance_info_cache with network_info: [{"id": "cf06de95-5747-4226-b66c-b9ccca47321d", "address": "fa:16:3e:9e:46:44", "network": {"id": "403ac06e-e45e-4215-bf0c-16ddd583ddc5", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1349318740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ac5c2a653dae436c97514507939c4e3c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e41c97-4d75-4041-ae71-321e7e9d480b", "external-id": "nsx-vlan-transportzone-483", "segmentation_id": 483, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf06de95-57", "ovs_interfaceid": "cf06de95-5747-4226-b66c-b9ccca47321d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 873.192519] env[63345]: DEBUG oslo_vmware.api [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5244e5b5-8f30-cdac-7d72-c960af8ff21f, 'name': SearchDatastore_Task, 'duration_secs': 0.016445} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.193636] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6f1e6ba9-2400-4946-a92f-0eff1450ce0a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.203560] env[63345]: DEBUG oslo_vmware.api [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Waiting for the task: (returnval){ [ 873.203560] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52a4001c-97f0-f4f6-6664-2746a723506c" [ 873.203560] env[63345]: _type = "Task" [ 873.203560] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.211718] env[63345]: DEBUG oslo_vmware.api [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52a4001c-97f0-f4f6-6664-2746a723506c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.260128] env[63345]: DEBUG nova.compute.manager [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: c84c8b9a-9164-4dd7-b094-dd09c15c6f21] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 873.263552] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 873.264769] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fa5780d3-80a3-4493-9027-e10daab86ef1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.266978] env[63345]: DEBUG oslo_concurrency.lockutils [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.594s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 873.267566] env[63345]: DEBUG nova.compute.manager [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] [instance: ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6] Start building networks asynchronously for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 873.270962] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 24.670s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 873.270962] env[63345]: DEBUG nova.objects.instance [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63345) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 873.279031] env[63345]: DEBUG oslo_vmware.api [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Waiting for the task: (returnval){ [ 873.279031] env[63345]: value = "task-1017316" [ 873.279031] env[63345]: _type = "Task" [ 873.279031] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.287217] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] VM already powered off {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 873.287562] env[63345]: DEBUG nova.compute.manager [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 873.288440] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf2afe58-0124-4141-8bdf-048ffbf95418 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.295542] env[63345]: DEBUG oslo_concurrency.lockutils [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Acquiring lock "refresh_cache-070a834d-6478-4705-8df0-2a27c8780507" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 873.405652] env[63345]: DEBUG oslo_vmware.api [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]523cc977-b997-93ee-e040-a2289f6f4b4c, 'name': SearchDatastore_Task, 'duration_secs': 0.011407} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.406046] env[63345]: DEBUG oslo_concurrency.lockutils [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 873.406309] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 873.406531] env[63345]: DEBUG oslo_concurrency.lockutils [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 873.561610] env[63345]: DEBUG oslo_vmware.api [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017315, 'name': PowerOnVM_Task, 'duration_secs': 0.598368} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.561986] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 873.562292] env[63345]: INFO nova.compute.manager [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Took 9.56 seconds to spawn the instance on the hypervisor. [ 873.562479] env[63345]: DEBUG nova.compute.manager [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 873.563371] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27f24bb7-997d-4659-b9c6-b4a253587293 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.589876] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Releasing lock "refresh_cache-070a834d-6478-4705-8df0-2a27c8780507" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 873.590160] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Updated the network info_cache for instance {{(pid=63345) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10328}} [ 873.590498] env[63345]: DEBUG oslo_concurrency.lockutils [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Acquired lock "refresh_cache-070a834d-6478-4705-8df0-2a27c8780507" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 873.590762] env[63345]: DEBUG nova.network.neutron [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 873.592148] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 873.592426] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 873.592879] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63345) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 873.593107] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 873.593311] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 873.593539] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 873.593718] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63345) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10876}} [ 873.593907] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager.update_available_resource {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 873.712465] env[63345]: DEBUG oslo_vmware.api [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52a4001c-97f0-f4f6-6664-2746a723506c, 'name': SearchDatastore_Task, 'duration_secs': 0.015441} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.712742] env[63345]: DEBUG oslo_concurrency.lockutils [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 873.713027] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 14198777-9091-4c69-8928-c83135acc7d2/14198777-9091-4c69-8928-c83135acc7d2.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 873.713329] env[63345]: DEBUG oslo_concurrency.lockutils [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 873.713524] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 873.713745] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b53b4d0b-fee5-4c95-8acb-99b52c142237 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.715728] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-88dd53cd-479a-4f33-a7af-ceec1085d758 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.725059] env[63345]: DEBUG oslo_vmware.api [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Waiting for the task: (returnval){ [ 873.725059] env[63345]: value = "task-1017317" [ 873.725059] env[63345]: _type = "Task" [ 873.725059] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.725864] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 873.726257] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 873.729807] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f6b56f74-54b3-43ed-b48f-2a24d1ca23bc {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.736688] env[63345]: DEBUG oslo_vmware.api [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Waiting for the task: (returnval){ [ 873.736688] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]526779be-6db8-8e54-396e-5fd497fe2898" [ 873.736688] env[63345]: _type = "Task" [ 873.736688] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.740133] env[63345]: DEBUG oslo_vmware.api [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Task: {'id': task-1017317, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.748881] env[63345]: DEBUG oslo_vmware.api [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]526779be-6db8-8e54-396e-5fd497fe2898, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.774652] env[63345]: DEBUG nova.compute.utils [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 873.776198] env[63345]: DEBUG nova.compute.manager [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] [instance: ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6] Not allocating networking since 'none' was specified. {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1983}} [ 873.782638] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 874.087115] env[63345]: INFO nova.compute.manager [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Took 36.27 seconds to build instance. 
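The PowerOnVM_Task and SearchDatastore_Task entries above follow oslo.vmware's invoke-then-poll pattern: a task is started with invoke_api() and then polled until vCenter reports it finished. A rough illustrative sketch only (not Nova's code); `session` and `vm_ref` are assumed to be an existing oslo_vmware.api.VMwareAPISession and a VirtualMachine managed-object reference:

def power_on(session, vm_ref):
    """Issue PowerOnVM_Task and block until vCenter reports completion.

    invoke_api() is what produces the "Invoking VirtualMachine.PowerOnVM_Task"
    debug lines; wait_for_task() produces the "_poll_task ... progress is N%"
    and "completed successfully" lines seen throughout this log.
    """
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    return session.wait_for_task(task)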
[ 874.097381] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 874.237068] env[63345]: DEBUG oslo_vmware.api [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Task: {'id': task-1017317, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.252316] env[63345]: DEBUG oslo_vmware.api [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]526779be-6db8-8e54-396e-5fd497fe2898, 'name': SearchDatastore_Task, 'duration_secs': 0.012565} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.253293] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6d0c830b-7b3c-48c6-bf43-e31a5221a042 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.262314] env[63345]: DEBUG oslo_vmware.api [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Waiting for the task: (returnval){ [ 874.262314] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]5260c0d4-cc06-7849-2390-bdfddaba66ee" [ 874.262314] env[63345]: _type = "Task" [ 874.262314] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.271258] env[63345]: DEBUG oslo_vmware.api [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5260c0d4-cc06-7849-2390-bdfddaba66ee, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.281101] env[63345]: DEBUG nova.compute.manager [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] [instance: ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6] Start building block device mappings for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 874.284718] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0a3327ce-2dfd-4b9e-a67e-69b11ea40ba2 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.014s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 874.285985] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.005s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 874.287440] env[63345]: INFO nova.compute.claims [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: a415d4f2-abc7-4553-8442-312316e686b2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 874.563193] env[63345]: DEBUG nova.network.neutron [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Updating instance_info_cache with network_info: [{"id": "cf06de95-5747-4226-b66c-b9ccca47321d", "address": "fa:16:3e:9e:46:44", "network": {"id": "403ac06e-e45e-4215-bf0c-16ddd583ddc5", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1349318740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ac5c2a653dae436c97514507939c4e3c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e41c97-4d75-4041-ae71-321e7e9d480b", "external-id": "nsx-vlan-transportzone-483", "segmentation_id": 483, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf06de95-57", "ovs_interfaceid": "cf06de95-5747-4226-b66c-b9ccca47321d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 874.589253] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5cac58fc-ec16-4540-991c-bcf2eeca5f53 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Lock "0da64b45-fa00-4fe8-8d1d-df586f27743f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.784s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 874.737559] env[63345]: DEBUG oslo_vmware.api [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Task: {'id': task-1017317, 'name': 
CopyVirtualDisk_Task, 'duration_secs': 0.555992} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.737853] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 14198777-9091-4c69-8928-c83135acc7d2/14198777-9091-4c69-8928-c83135acc7d2.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 874.738096] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: 14198777-9091-4c69-8928-c83135acc7d2] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 874.738365] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d6ce220c-a0f3-452b-a77f-fbd29ed71d27 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.746447] env[63345]: DEBUG oslo_vmware.api [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Waiting for the task: (returnval){ [ 874.746447] env[63345]: value = "task-1017318" [ 874.746447] env[63345]: _type = "Task" [ 874.746447] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.756719] env[63345]: DEBUG oslo_vmware.api [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Task: {'id': task-1017318, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.773733] env[63345]: DEBUG oslo_vmware.api [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5260c0d4-cc06-7849-2390-bdfddaba66ee, 'name': SearchDatastore_Task, 'duration_secs': 0.015938} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.773897] env[63345]: DEBUG oslo_concurrency.lockutils [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 874.774190] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 3a85df04-3997-48a3-8992-f24fe997b3cc/3a85df04-3997-48a3-8992-f24fe997b3cc.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 874.774463] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7fdadf4b-c7ee-443d-9bd2-6ce72d1e09e4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.783049] env[63345]: DEBUG oslo_vmware.api [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Waiting for the task: (returnval){ [ 874.783049] env[63345]: value = "task-1017319" [ 874.783049] env[63345]: _type = "Task" [ 874.783049] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.801021] env[63345]: DEBUG oslo_vmware.api [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017319, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.066629] env[63345]: DEBUG oslo_concurrency.lockutils [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Releasing lock "refresh_cache-070a834d-6478-4705-8df0-2a27c8780507" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 875.259326] env[63345]: DEBUG oslo_vmware.api [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Task: {'id': task-1017318, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079516} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.259663] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: 14198777-9091-4c69-8928-c83135acc7d2] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 875.260616] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f70e025-b941-46ba-93eb-c3a560e18e60 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.292482] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: 14198777-9091-4c69-8928-c83135acc7d2] Reconfiguring VM instance instance-0000004f to attach disk [datastore2] 14198777-9091-4c69-8928-c83135acc7d2/14198777-9091-4c69-8928-c83135acc7d2.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 875.292919] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-10444a04-9bda-40b2-9bf5-9b52b0e41251 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.322594] env[63345]: DEBUG nova.compute.manager [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] [instance: ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6] Start spawning the instance on the hypervisor. {{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 875.334307] env[63345]: DEBUG oslo_vmware.api [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017319, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.516619} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.336519] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 3a85df04-3997-48a3-8992-f24fe997b3cc/3a85df04-3997-48a3-8992-f24fe997b3cc.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 875.336832] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 875.337411] env[63345]: DEBUG oslo_vmware.api [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Waiting for the task: (returnval){ [ 875.337411] env[63345]: value = "task-1017320" [ 875.337411] env[63345]: _type = "Task" [ 875.337411] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.337672] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d6c43fe2-62d6-41b2-8838-4fd9857d09ce {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.355848] env[63345]: DEBUG oslo_vmware.api [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Task: {'id': task-1017320, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.358388] env[63345]: DEBUG nova.virt.hardware [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 875.358983] env[63345]: DEBUG nova.virt.hardware [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 875.359194] env[63345]: DEBUG nova.virt.hardware [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 875.360011] env[63345]: DEBUG nova.virt.hardware [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 875.360011] env[63345]: DEBUG nova.virt.hardware [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 875.360011] env[63345]: DEBUG nova.virt.hardware [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 875.360011] env[63345]: DEBUG nova.virt.hardware [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 875.360269] env[63345]: DEBUG nova.virt.hardware [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 875.360400] env[63345]: DEBUG nova.virt.hardware [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 
tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 875.360587] env[63345]: DEBUG nova.virt.hardware [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 875.360849] env[63345]: DEBUG nova.virt.hardware [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 875.363544] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f886c40-7579-453e-8ca4-a32041bf57ff {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.366960] env[63345]: DEBUG oslo_vmware.api [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Waiting for the task: (returnval){ [ 875.366960] env[63345]: value = "task-1017321" [ 875.366960] env[63345]: _type = "Task" [ 875.366960] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.379067] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-115c0f41-33b3-4936-9dcc-1c22aba365fc {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.389379] env[63345]: DEBUG oslo_vmware.api [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017321, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.400919] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] [instance: ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6] Instance VIF info [] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 875.406751] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Creating folder: Project (71459f9e82c04d6295dc2472a0635d03). Parent ref: group-v225918. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 875.409753] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c18c21a7-6cfe-4665-bcd5-438eb0c98261 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.426300] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Created folder: Project (71459f9e82c04d6295dc2472a0635d03) in parent group-v225918. 
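The recurring Acquiring/Acquired/Releasing lock lines in this log (for example the "refresh_cache-<uuid>" and image-cache VMDK locks) are emitted by oslo_concurrency.lockutils. A minimal sketch of that usage, with a caller-supplied refresh callable as an assumed placeholder (illustrative only, not Nova's code):

from oslo_concurrency import lockutils

# Entering/leaving lockutils.lock() produces the plain "Acquiring lock" /
# "Acquired lock" / "Releasing lock" lines (lockutils.py:310/313/331 above);
# the 'Acquiring lock "..." by "..."' variants come from the
# @lockutils.synchronized decorator instead (lockutils.py:402/407/421).
def refresh_instance_cache(instance_uuid, do_refresh):
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        return do_refresh()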
[ 875.426547] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Creating folder: Instances. Parent ref: group-v226083. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 875.426837] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1f8bf5c4-c691-4de1-b4bc-4160d2a01a5f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.438053] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Created folder: Instances in parent group-v226083. [ 875.438386] env[63345]: DEBUG oslo.service.loopingcall [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 875.438614] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 875.438839] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b9f5fda8-db80-421b-947c-770118cfdccd {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.456476] env[63345]: DEBUG nova.compute.manager [req-ba6c9e37-dd8e-48a9-b1b5-fc55e3a47024 req-759bac36-90a9-4658-95ea-7cfbaa8fff44 service nova] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Received event network-changed-9e054cb2-eb47-4dd3-8ec7-d8205d577337 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 875.456705] env[63345]: DEBUG nova.compute.manager [req-ba6c9e37-dd8e-48a9-b1b5-fc55e3a47024 req-759bac36-90a9-4658-95ea-7cfbaa8fff44 service nova] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Refreshing instance network info cache due to event network-changed-9e054cb2-eb47-4dd3-8ec7-d8205d577337. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 875.457054] env[63345]: DEBUG oslo_concurrency.lockutils [req-ba6c9e37-dd8e-48a9-b1b5-fc55e3a47024 req-759bac36-90a9-4658-95ea-7cfbaa8fff44 service nova] Acquiring lock "refresh_cache-0da64b45-fa00-4fe8-8d1d-df586f27743f" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 875.457171] env[63345]: DEBUG oslo_concurrency.lockutils [req-ba6c9e37-dd8e-48a9-b1b5-fc55e3a47024 req-759bac36-90a9-4658-95ea-7cfbaa8fff44 service nova] Acquired lock "refresh_cache-0da64b45-fa00-4fe8-8d1d-df586f27743f" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 875.457256] env[63345]: DEBUG nova.network.neutron [req-ba6c9e37-dd8e-48a9-b1b5-fc55e3a47024 req-759bac36-90a9-4658-95ea-7cfbaa8fff44 service nova] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Refreshing network info cache for port 9e054cb2-eb47-4dd3-8ec7-d8205d577337 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 875.461929] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 875.461929] env[63345]: value = "task-1017324" [ 875.461929] env[63345]: _type = "Task" [ 875.461929] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.476620] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017324, 'name': CreateVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.568806] env[63345]: DEBUG nova.compute.manager [req-aa8be050-c29c-4072-b034-8f88ff521976 req-d78a02c7-69e3-4ad6-9126-06aa30da9f67 service nova] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Received event network-vif-unplugged-cf06de95-5747-4226-b66c-b9ccca47321d {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 875.568887] env[63345]: DEBUG oslo_concurrency.lockutils [req-aa8be050-c29c-4072-b034-8f88ff521976 req-d78a02c7-69e3-4ad6-9126-06aa30da9f67 service nova] Acquiring lock "070a834d-6478-4705-8df0-2a27c8780507-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 875.569117] env[63345]: DEBUG oslo_concurrency.lockutils [req-aa8be050-c29c-4072-b034-8f88ff521976 req-d78a02c7-69e3-4ad6-9126-06aa30da9f67 service nova] Lock "070a834d-6478-4705-8df0-2a27c8780507-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 875.569239] env[63345]: DEBUG oslo_concurrency.lockutils [req-aa8be050-c29c-4072-b034-8f88ff521976 req-d78a02c7-69e3-4ad6-9126-06aa30da9f67 service nova] Lock "070a834d-6478-4705-8df0-2a27c8780507-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 875.569435] env[63345]: DEBUG nova.compute.manager [req-aa8be050-c29c-4072-b034-8f88ff521976 req-d78a02c7-69e3-4ad6-9126-06aa30da9f67 service nova] [instance: 070a834d-6478-4705-8df0-2a27c8780507] No waiting events found dispatching network-vif-unplugged-cf06de95-5747-4226-b66c-b9ccca47321d {{(pid=63345) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:322}} [ 875.569610] env[63345]: WARNING nova.compute.manager [req-aa8be050-c29c-4072-b034-8f88ff521976 req-d78a02c7-69e3-4ad6-9126-06aa30da9f67 service nova] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Received unexpected event network-vif-unplugged-cf06de95-5747-4226-b66c-b9ccca47321d for instance with vm_state shelved and task_state shelving_offloading. [ 875.577969] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 875.578870] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0309833-13aa-4ca2-bd52-0745ca8cce06 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.589961] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 875.590264] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-000e91b4-07de-4ac2-aa13-be158d70f83e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.698037] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 875.698037] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 875.698037] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Deleting the datastore file [datastore2] 070a834d-6478-4705-8df0-2a27c8780507 {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 875.698477] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-50032c23-688a-4295-a93f-975fae77d576 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.706909] env[63345]: DEBUG oslo_vmware.api [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Waiting for the task: (returnval){ [ 875.706909] env[63345]: value = "task-1017326" [ 875.706909] env[63345]: _type = "Task" [ 875.706909] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.720328] env[63345]: DEBUG oslo_vmware.api [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1017326, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.748968] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d0544df-8c78-4c67-97a6-233b6ad4ae53 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.759528] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-532080cc-57bd-465d-9dc9-b87f4cdbe182 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.794084] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d30b6bad-171e-4720-81df-fac5ca6aa5fe {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.802533] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e422b14c-9879-4601-8532-a0913ab09298 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.817115] env[63345]: DEBUG nova.compute.provider_tree [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 875.852188] env[63345]: DEBUG oslo_vmware.api [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Task: {'id': task-1017320, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.882088] env[63345]: DEBUG oslo_vmware.api [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017321, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075354} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.882414] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 875.883398] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24ede281-bab1-4c23-8f89-a47f80cd01d0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.917896] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Reconfiguring VM instance instance-00000038 to attach disk [datastore2] 3a85df04-3997-48a3-8992-f24fe997b3cc/3a85df04-3997-48a3-8992-f24fe997b3cc.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 875.918290] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c0e2e486-55bd-4661-992d-d71498487108 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.940616] env[63345]: DEBUG oslo_vmware.api [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Waiting for the task: (returnval){ [ 875.940616] env[63345]: value = "task-1017327" [ 875.940616] env[63345]: _type = "Task" [ 875.940616] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.949699] env[63345]: DEBUG oslo_vmware.api [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017327, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.976477] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017324, 'name': CreateVM_Task, 'duration_secs': 0.50355} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.976654] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 875.977117] env[63345]: DEBUG oslo_concurrency.lockutils [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 875.977290] env[63345]: DEBUG oslo_concurrency.lockutils [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 875.977669] env[63345]: DEBUG oslo_concurrency.lockutils [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 875.977940] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f82b39a2-21fe-4090-bbbb-ad5dbd6e7e37 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.984864] env[63345]: DEBUG oslo_vmware.api [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Waiting for the task: (returnval){ [ 875.984864] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]526208c4-79d2-8dce-e256-896657e4ca2a" [ 875.984864] env[63345]: _type = "Task" [ 875.984864] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.995957] env[63345]: DEBUG oslo_vmware.api [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]526208c4-79d2-8dce-e256-896657e4ca2a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.221668] env[63345]: DEBUG oslo_vmware.api [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1017326, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.17455} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.221974] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 876.222229] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 876.222473] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 876.251831] env[63345]: INFO nova.scheduler.client.report [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Deleted allocations for instance 070a834d-6478-4705-8df0-2a27c8780507 [ 876.320653] env[63345]: DEBUG nova.scheduler.client.report [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 876.343168] env[63345]: DEBUG nova.network.neutron [req-ba6c9e37-dd8e-48a9-b1b5-fc55e3a47024 req-759bac36-90a9-4658-95ea-7cfbaa8fff44 service nova] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Updated VIF entry in instance network info cache for port 9e054cb2-eb47-4dd3-8ec7-d8205d577337. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 876.343649] env[63345]: DEBUG nova.network.neutron [req-ba6c9e37-dd8e-48a9-b1b5-fc55e3a47024 req-759bac36-90a9-4658-95ea-7cfbaa8fff44 service nova] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Updating instance_info_cache with network_info: [{"id": "9e054cb2-eb47-4dd3-8ec7-d8205d577337", "address": "fa:16:3e:80:1f:9f", "network": {"id": "b360ab0d-3deb-4632-a8d5-c1639db9e9e2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2015660260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.226", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33c28bfca4da460e8ca96dc7519204c8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f35e69ef-c2c8-4b8c-9887-33e97b242c0a", "external-id": "nsx-vlan-transportzone-969", "segmentation_id": 969, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e054cb2-eb", "ovs_interfaceid": "9e054cb2-eb47-4dd3-8ec7-d8205d577337", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 876.357066] env[63345]: DEBUG oslo_vmware.api [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Task: {'id': task-1017320, 'name': ReconfigVM_Task, 'duration_secs': 0.608197} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.357373] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: 14198777-9091-4c69-8928-c83135acc7d2] Reconfigured VM instance instance-0000004f to attach disk [datastore2] 14198777-9091-4c69-8928-c83135acc7d2/14198777-9091-4c69-8928-c83135acc7d2.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 876.358059] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-54432699-89be-48d9-b9df-361e300b2703 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.367115] env[63345]: DEBUG oslo_vmware.api [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Waiting for the task: (returnval){ [ 876.367115] env[63345]: value = "task-1017328" [ 876.367115] env[63345]: _type = "Task" [ 876.367115] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.376606] env[63345]: DEBUG oslo_vmware.api [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Task: {'id': task-1017328, 'name': Rename_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.451733] env[63345]: DEBUG oslo_vmware.api [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017327, 'name': ReconfigVM_Task, 'duration_secs': 0.476875} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.451965] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Reconfigured VM instance instance-00000038 to attach disk [datastore2] 3a85df04-3997-48a3-8992-f24fe997b3cc/3a85df04-3997-48a3-8992-f24fe997b3cc.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 876.452662] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6975b9f7-fd50-4e0d-92c4-2010557e719a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.461879] env[63345]: DEBUG oslo_vmware.api [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Waiting for the task: (returnval){ [ 876.461879] env[63345]: value = "task-1017329" [ 876.461879] env[63345]: _type = "Task" [ 876.461879] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.470799] env[63345]: DEBUG oslo_vmware.api [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017329, 'name': Rename_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.497627] env[63345]: DEBUG oslo_vmware.api [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]526208c4-79d2-8dce-e256-896657e4ca2a, 'name': SearchDatastore_Task, 'duration_secs': 0.011452} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.497972] env[63345]: DEBUG oslo_concurrency.lockutils [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 876.498238] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] [instance: ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 876.498496] env[63345]: DEBUG oslo_concurrency.lockutils [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 876.498646] env[63345]: DEBUG oslo_concurrency.lockutils [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 876.498831] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 876.499136] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4190d7cd-7611-4141-9123-cea79a13d837 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.510698] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 876.510894] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 876.511731] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4f8dc02f-b954-4349-88fa-8fbd988a07da {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.517573] env[63345]: DEBUG oslo_vmware.api [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Waiting for the task: (returnval){ [ 876.517573] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]528da3ae-c570-7658-b8c1-15c55e0dfb3b" [ 876.517573] env[63345]: _type = "Task" [ 876.517573] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.526811] env[63345]: DEBUG oslo_vmware.api [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]528da3ae-c570-7658-b8c1-15c55e0dfb3b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.758125] env[63345]: DEBUG oslo_concurrency.lockutils [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 876.827144] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.541s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 876.827698] env[63345]: DEBUG nova.compute.manager [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: a415d4f2-abc7-4553-8442-312316e686b2] Start building networks asynchronously for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 876.830699] env[63345]: DEBUG oslo_concurrency.lockutils [None req-3e9dc5e8-6ea8-4344-96fb-249ec931b755 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.704s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 876.830940] env[63345]: DEBUG nova.objects.instance [None req-3e9dc5e8-6ea8-4344-96fb-249ec931b755 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lazy-loading 'resources' on Instance uuid 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 876.846665] env[63345]: DEBUG oslo_concurrency.lockutils [req-ba6c9e37-dd8e-48a9-b1b5-fc55e3a47024 req-759bac36-90a9-4658-95ea-7cfbaa8fff44 service nova] Releasing lock "refresh_cache-0da64b45-fa00-4fe8-8d1d-df586f27743f" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 876.879064] env[63345]: DEBUG oslo_vmware.api [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Task: {'id': task-1017328, 'name': Rename_Task, 'duration_secs': 0.399053} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.879377] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: 14198777-9091-4c69-8928-c83135acc7d2] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 876.879635] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-52bb2c29-eb2d-4f89-91cc-5ab8a6fc3ea2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.889054] env[63345]: DEBUG oslo_vmware.api [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Waiting for the task: (returnval){ [ 876.889054] env[63345]: value = "task-1017330" [ 876.889054] env[63345]: _type = "Task" [ 876.889054] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.898595] env[63345]: DEBUG oslo_vmware.api [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Task: {'id': task-1017330, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.974038] env[63345]: DEBUG oslo_vmware.api [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017329, 'name': Rename_Task, 'duration_secs': 0.32954} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.974192] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 876.974380] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-38de848e-a40f-42b6-862e-531fdeabc85d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.982439] env[63345]: DEBUG oslo_vmware.api [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Waiting for the task: (returnval){ [ 876.982439] env[63345]: value = "task-1017331" [ 876.982439] env[63345]: _type = "Task" [ 876.982439] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.992488] env[63345]: DEBUG oslo_vmware.api [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017331, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.032666] env[63345]: DEBUG oslo_vmware.api [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]528da3ae-c570-7658-b8c1-15c55e0dfb3b, 'name': SearchDatastore_Task, 'duration_secs': 0.013007} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.032666] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-394a5ccf-4386-4e77-9a95-a6e758cf2255 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.039514] env[63345]: DEBUG oslo_vmware.api [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Waiting for the task: (returnval){ [ 877.039514] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52598d93-8a48-8146-4c9d-0f98967724bb" [ 877.039514] env[63345]: _type = "Task" [ 877.039514] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.049832] env[63345]: DEBUG oslo_vmware.api [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52598d93-8a48-8146-4c9d-0f98967724bb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.334846] env[63345]: DEBUG nova.compute.utils [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 877.336403] env[63345]: DEBUG nova.compute.manager [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: a415d4f2-abc7-4553-8442-312316e686b2] Allocating IP information in the background. {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 877.336581] env[63345]: DEBUG nova.network.neutron [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: a415d4f2-abc7-4553-8442-312316e686b2] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 877.406277] env[63345]: DEBUG oslo_vmware.api [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Task: {'id': task-1017330, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.417228] env[63345]: DEBUG nova.policy [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f908b41c13824ddeb5fa5648e8750aa9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '34efcd7d600f49698c6619be002d838f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 877.499712] env[63345]: DEBUG oslo_vmware.api [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017331, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.552132] env[63345]: DEBUG oslo_vmware.api [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52598d93-8a48-8146-4c9d-0f98967724bb, 'name': SearchDatastore_Task, 'duration_secs': 0.027895} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.555457] env[63345]: DEBUG oslo_concurrency.lockutils [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 877.555874] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore1] ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6/ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 877.557459] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9e57985e-268b-439e-a032-3b194bc45109 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.566931] env[63345]: DEBUG oslo_vmware.api [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Waiting for the task: (returnval){ [ 877.566931] env[63345]: value = "task-1017332" [ 877.566931] env[63345]: _type = "Task" [ 877.566931] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.583958] env[63345]: DEBUG oslo_vmware.api [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Task: {'id': task-1017332, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.606558] env[63345]: DEBUG nova.compute.manager [req-f11e32c9-f9d6-485a-8d1e-2b24058c4a14 req-3067a0ee-89be-4baa-8f6f-dd1cd72c59f2 service nova] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Received event network-changed-cf06de95-5747-4226-b66c-b9ccca47321d {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 877.607037] env[63345]: DEBUG nova.compute.manager [req-f11e32c9-f9d6-485a-8d1e-2b24058c4a14 req-3067a0ee-89be-4baa-8f6f-dd1cd72c59f2 service nova] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Refreshing instance network info cache due to event network-changed-cf06de95-5747-4226-b66c-b9ccca47321d. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 877.607195] env[63345]: DEBUG oslo_concurrency.lockutils [req-f11e32c9-f9d6-485a-8d1e-2b24058c4a14 req-3067a0ee-89be-4baa-8f6f-dd1cd72c59f2 service nova] Acquiring lock "refresh_cache-070a834d-6478-4705-8df0-2a27c8780507" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 877.607571] env[63345]: DEBUG oslo_concurrency.lockutils [req-f11e32c9-f9d6-485a-8d1e-2b24058c4a14 req-3067a0ee-89be-4baa-8f6f-dd1cd72c59f2 service nova] Acquired lock "refresh_cache-070a834d-6478-4705-8df0-2a27c8780507" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 877.607571] env[63345]: DEBUG nova.network.neutron [req-f11e32c9-f9d6-485a-8d1e-2b24058c4a14 req-3067a0ee-89be-4baa-8f6f-dd1cd72c59f2 service nova] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Refreshing network info cache for port cf06de95-5747-4226-b66c-b9ccca47321d {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 877.783566] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c4a7e71-3ebc-420f-9df3-cbf2ec68c037 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.793924] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-530a7996-4363-4321-89f0-72545677789b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.841884] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e83cec49-2f38-4d56-b23a-d988ee44bc34 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.845344] env[63345]: DEBUG nova.compute.manager [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: a415d4f2-abc7-4553-8442-312316e686b2] Start building block device mappings for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 877.859541] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25569682-a7d0-4d5b-bae2-6818fc427551 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.879717] env[63345]: DEBUG nova.compute.provider_tree [None req-3e9dc5e8-6ea8-4344-96fb-249ec931b755 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 877.903203] env[63345]: DEBUG oslo_vmware.api [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Task: {'id': task-1017330, 'name': PowerOnVM_Task, 'duration_secs': 0.829968} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.903203] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: 14198777-9091-4c69-8928-c83135acc7d2] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 877.903203] env[63345]: INFO nova.compute.manager [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: 14198777-9091-4c69-8928-c83135acc7d2] Took 9.25 seconds to spawn the instance on the hypervisor. [ 877.903203] env[63345]: DEBUG nova.compute.manager [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: 14198777-9091-4c69-8928-c83135acc7d2] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 877.904040] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf816ffc-bac1-4cfb-8128-a3c9daba29e0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.995525] env[63345]: DEBUG oslo_vmware.api [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017331, 'name': PowerOnVM_Task, 'duration_secs': 0.873677} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.995891] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 877.996184] env[63345]: DEBUG nova.compute.manager [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 877.997127] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-725171b8-cab7-41e9-8dfd-795078ce192f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.016082] env[63345]: DEBUG nova.network.neutron [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: a415d4f2-abc7-4553-8442-312316e686b2] Successfully created port: f2c021c6-dbd4-40da-80c8-19678be6d78c {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 878.079556] env[63345]: DEBUG oslo_vmware.api [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Task: {'id': task-1017332, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.361845] env[63345]: DEBUG nova.network.neutron [req-f11e32c9-f9d6-485a-8d1e-2b24058c4a14 req-3067a0ee-89be-4baa-8f6f-dd1cd72c59f2 service nova] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Updated VIF entry in instance network info cache for port cf06de95-5747-4226-b66c-b9ccca47321d. {{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 878.362262] env[63345]: DEBUG nova.network.neutron [req-f11e32c9-f9d6-485a-8d1e-2b24058c4a14 req-3067a0ee-89be-4baa-8f6f-dd1cd72c59f2 service nova] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Updating instance_info_cache with network_info: [{"id": "cf06de95-5747-4226-b66c-b9ccca47321d", "address": "fa:16:3e:9e:46:44", "network": {"id": "403ac06e-e45e-4215-bf0c-16ddd583ddc5", "bridge": null, "label": "tempest-ServersNegativeTestJSON-1349318740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ac5c2a653dae436c97514507939c4e3c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapcf06de95-57", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 878.385333] env[63345]: DEBUG nova.scheduler.client.report [None req-3e9dc5e8-6ea8-4344-96fb-249ec931b755 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 878.422862] env[63345]: INFO nova.compute.manager [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: 14198777-9091-4c69-8928-c83135acc7d2] Took 38.06 seconds to build instance. [ 878.520763] env[63345]: DEBUG oslo_concurrency.lockutils [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 878.579322] env[63345]: DEBUG oslo_vmware.api [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Task: {'id': task-1017332, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.747329} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.579637] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore1] ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6/ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 878.580021] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] [instance: ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 878.580166] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4744982b-0475-4b1f-a690-20abfdad18e2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.587590] env[63345]: DEBUG oslo_vmware.api [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Waiting for the task: (returnval){ [ 878.587590] env[63345]: value = "task-1017333" [ 878.587590] env[63345]: _type = "Task" [ 878.587590] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.596345] env[63345]: DEBUG oslo_vmware.api [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Task: {'id': task-1017333, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.793177] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Acquiring lock "070a834d-6478-4705-8df0-2a27c8780507" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 878.855342] env[63345]: DEBUG nova.compute.manager [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: a415d4f2-abc7-4553-8442-312316e686b2] Start spawning the instance on the hypervisor. 
{{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 878.864505] env[63345]: DEBUG oslo_concurrency.lockutils [req-f11e32c9-f9d6-485a-8d1e-2b24058c4a14 req-3067a0ee-89be-4baa-8f6f-dd1cd72c59f2 service nova] Releasing lock "refresh_cache-070a834d-6478-4705-8df0-2a27c8780507" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 878.884026] env[63345]: DEBUG nova.virt.hardware [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 878.884026] env[63345]: DEBUG nova.virt.hardware [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 878.884026] env[63345]: DEBUG nova.virt.hardware [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 878.884282] env[63345]: DEBUG nova.virt.hardware [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 878.884360] env[63345]: DEBUG nova.virt.hardware [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 878.884504] env[63345]: DEBUG nova.virt.hardware [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 878.884717] env[63345]: DEBUG nova.virt.hardware [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 878.884882] env[63345]: DEBUG nova.virt.hardware [None 
req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 878.885061] env[63345]: DEBUG nova.virt.hardware [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 878.885228] env[63345]: DEBUG nova.virt.hardware [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 878.885402] env[63345]: DEBUG nova.virt.hardware [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 878.886273] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b85f5f0b-0b67-4ac1-840c-1c8d80f6a143 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.894185] env[63345]: DEBUG oslo_concurrency.lockutils [None req-3e9dc5e8-6ea8-4344-96fb-249ec931b755 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.063s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 878.896404] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.503s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 878.897899] env[63345]: INFO nova.compute.claims [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 878.902043] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cf70f17-8458-408c-a8a5-0c02ac746e61 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.919465] env[63345]: INFO nova.scheduler.client.report [None req-3e9dc5e8-6ea8-4344-96fb-249ec931b755 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Deleted allocations for instance 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f [ 878.925454] env[63345]: DEBUG oslo_concurrency.lockutils [None req-3b6903cd-713f-495a-8e61-d009c6d102a0 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Lock "14198777-9091-4c69-8928-c83135acc7d2" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.565s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 879.097468] env[63345]: DEBUG oslo_vmware.api [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Task: {'id': task-1017333, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072374} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.097799] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] [instance: ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 879.098636] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee96c849-fc52-497e-b75c-b85fa4c4b73b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.119685] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] [instance: ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6] Reconfiguring VM instance instance-00000050 to attach disk [datastore1] ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6/ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 879.120011] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-10c84cd1-47cf-40fb-8549-1563816a5949 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.141415] env[63345]: DEBUG oslo_vmware.api [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Waiting for the task: (returnval){ [ 879.141415] env[63345]: value = "task-1017334" [ 879.141415] env[63345]: _type = "Task" [ 879.141415] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.150233] env[63345]: DEBUG oslo_vmware.api [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Task: {'id': task-1017334, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.429227] env[63345]: DEBUG oslo_concurrency.lockutils [None req-3e9dc5e8-6ea8-4344-96fb-249ec931b755 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lock "691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.229s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 879.652494] env[63345]: DEBUG oslo_vmware.api [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Task: {'id': task-1017334, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.058947] env[63345]: INFO nova.compute.manager [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Rebuilding instance [ 880.122256] env[63345]: DEBUG nova.compute.manager [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 880.123186] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5550830-b4d7-4563-9f09-2280ee3260c0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.153560] env[63345]: DEBUG oslo_vmware.api [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Task: {'id': task-1017334, 'name': ReconfigVM_Task, 'duration_secs': 0.769156} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.158032] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] [instance: ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6] Reconfigured VM instance instance-00000050 to attach disk [datastore1] ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6/ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 880.158032] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a16b6db8-a2a7-4e33-91a8-c550ca65cf20 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.165087] env[63345]: DEBUG oslo_vmware.api [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Waiting for the task: (returnval){ [ 880.165087] env[63345]: value = "task-1017335" [ 880.165087] env[63345]: _type = "Task" [ 880.165087] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.175320] env[63345]: DEBUG nova.compute.manager [req-68853dac-80bc-40a7-8b91-ae2f704dc999 req-fbfed2d2-8ef2-4f08-9212-b452050609ee service nova] [instance: a415d4f2-abc7-4553-8442-312316e686b2] Received event network-vif-plugged-f2c021c6-dbd4-40da-80c8-19678be6d78c {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 880.175535] env[63345]: DEBUG oslo_concurrency.lockutils [req-68853dac-80bc-40a7-8b91-ae2f704dc999 req-fbfed2d2-8ef2-4f08-9212-b452050609ee service nova] Acquiring lock "a415d4f2-abc7-4553-8442-312316e686b2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 880.177026] env[63345]: DEBUG oslo_concurrency.lockutils [req-68853dac-80bc-40a7-8b91-ae2f704dc999 req-fbfed2d2-8ef2-4f08-9212-b452050609ee service nova] Lock "a415d4f2-abc7-4553-8442-312316e686b2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 880.177026] env[63345]: DEBUG oslo_concurrency.lockutils [req-68853dac-80bc-40a7-8b91-ae2f704dc999 req-fbfed2d2-8ef2-4f08-9212-b452050609ee service nova] Lock "a415d4f2-abc7-4553-8442-312316e686b2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 880.177026] env[63345]: DEBUG nova.compute.manager [req-68853dac-80bc-40a7-8b91-ae2f704dc999 req-fbfed2d2-8ef2-4f08-9212-b452050609ee service nova] [instance: a415d4f2-abc7-4553-8442-312316e686b2] No waiting events found dispatching network-vif-plugged-f2c021c6-dbd4-40da-80c8-19678be6d78c {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 880.177026] env[63345]: WARNING nova.compute.manager [req-68853dac-80bc-40a7-8b91-ae2f704dc999 req-fbfed2d2-8ef2-4f08-9212-b452050609ee service nova] [instance: a415d4f2-abc7-4553-8442-312316e686b2] Received unexpected event network-vif-plugged-f2c021c6-dbd4-40da-80c8-19678be6d78c for instance with vm_state building and task_state spawning. [ 880.181215] env[63345]: DEBUG oslo_vmware.api [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Task: {'id': task-1017335, 'name': Rename_Task} progress is 5%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.207893] env[63345]: DEBUG nova.network.neutron [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: a415d4f2-abc7-4553-8442-312316e686b2] Successfully updated port: f2c021c6-dbd4-40da-80c8-19678be6d78c {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 880.439977] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30deca94-1bb1-4cd2-b87e-d5add9bcfb0d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.448847] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7c9a51c-6b26-4257-9f53-decff238d083 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.482764] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe49d9a1-0eb1-47e6-ba07-ba1d96032c37 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.491570] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7edf1230-6e72-4474-864c-af8b3ee27078 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.506833] env[63345]: DEBUG nova.compute.provider_tree [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 880.679866] env[63345]: DEBUG oslo_vmware.api [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Task: {'id': task-1017335, 'name': Rename_Task, 'duration_secs': 0.201331} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.684382] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] [instance: ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 880.685051] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-57f1f80e-07be-4b46-b8fd-0adc53647858 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.695273] env[63345]: DEBUG oslo_vmware.api [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Waiting for the task: (returnval){ [ 880.695273] env[63345]: value = "task-1017336" [ 880.695273] env[63345]: _type = "Task" [ 880.695273] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.710216] env[63345]: DEBUG oslo_vmware.api [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Task: {'id': task-1017336, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.714796] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Acquiring lock "refresh_cache-a415d4f2-abc7-4553-8442-312316e686b2" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 880.715044] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Acquired lock "refresh_cache-a415d4f2-abc7-4553-8442-312316e686b2" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 880.715251] env[63345]: DEBUG nova.network.neutron [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: a415d4f2-abc7-4553-8442-312316e686b2] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 880.757737] env[63345]: DEBUG oslo_vmware.rw_handles [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ce0b59-aae8-839b-3975-7ad6fdc485f6/disk-0.vmdk. {{(pid=63345) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 880.758766] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-211b5f3c-c5e0-47ec-9451-afe9908e5d2d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.766795] env[63345]: DEBUG oslo_vmware.rw_handles [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ce0b59-aae8-839b-3975-7ad6fdc485f6/disk-0.vmdk is in state: ready. {{(pid=63345) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 880.766976] env[63345]: ERROR oslo_vmware.rw_handles [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ce0b59-aae8-839b-3975-7ad6fdc485f6/disk-0.vmdk due to incomplete transfer. 
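Annotation (not part of the captured log): the records above and below repeat one pattern: a VIM method returning a Task managed object is invoked (Rename_Task, PowerOnVM_Task, CopyVirtualDisk_Task, SearchDatastore_Task), and oslo.vmware then polls it ("progress is N%") until _poll_task reports it completed successfully. A minimal sketch of that invoke-and-wait pattern using oslo.vmware directly follows; the vCenter endpoint, credentials, and the vm_ref value are placeholders, not values taken from this log.

    # Illustrative sketch only; endpoint, credentials and vm_ref are placeholders.
    from oslo_vmware import api as vmware_api

    session = vmware_api.VMwareAPISession(
        'vcenter.example.org',            # placeholder vCenter host
        'administrator@vsphere.local',    # placeholder username
        'secret',                         # placeholder password
        api_retry_count=10,
        task_poll_interval=0.5)           # drives the periodic "progress is N%" polls

    # vm_ref is assumed to be an already-resolved VirtualMachine moref.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    task_info = session.wait_for_task(task)  # blocks until the task succeeds, raises on error
    print(task_info.state)                   # 'success' once the task completes
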
[ 880.767230] env[63345]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-0d515e17-3cc5-47a5-aa0b-e81d3b85c858 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.775757] env[63345]: DEBUG oslo_vmware.rw_handles [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ce0b59-aae8-839b-3975-7ad6fdc485f6/disk-0.vmdk. {{(pid=63345) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 880.775966] env[63345]: DEBUG nova.virt.vmwareapi.images [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Uploaded image 487d34b5-b85c-4d35-8b15-fd7347b3dcfd to the Glance image server {{(pid=63345) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:473}} [ 880.777638] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Destroying the VM {{(pid=63345) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 880.777908] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-812eaf39-e02a-48b9-8659-550217856a26 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.786690] env[63345]: DEBUG oslo_vmware.api [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Waiting for the task: (returnval){ [ 880.786690] env[63345]: value = "task-1017337" [ 880.786690] env[63345]: _type = "Task" [ 880.786690] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.797301] env[63345]: DEBUG oslo_vmware.api [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017337, 'name': Destroy_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.010549] env[63345]: DEBUG nova.scheduler.client.report [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 881.141662] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 881.142035] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c1cfa5ab-e7ad-4805-ae29-515f2608a256 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.151381] env[63345]: DEBUG oslo_vmware.api [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Waiting for the task: (returnval){ [ 881.151381] env[63345]: value = "task-1017338" [ 881.151381] env[63345]: _type = "Task" [ 881.151381] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.161133] env[63345]: DEBUG oslo_vmware.api [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017338, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.207155] env[63345]: DEBUG oslo_vmware.api [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Task: {'id': task-1017336, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.273384] env[63345]: DEBUG nova.network.neutron [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: a415d4f2-abc7-4553-8442-312316e686b2] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 881.300686] env[63345]: DEBUG oslo_vmware.api [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017337, 'name': Destroy_Task} progress is 33%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.513713] env[63345]: DEBUG nova.network.neutron [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: a415d4f2-abc7-4553-8442-312316e686b2] Updating instance_info_cache with network_info: [{"id": "f2c021c6-dbd4-40da-80c8-19678be6d78c", "address": "fa:16:3e:ae:e4:3b", "network": {"id": "dc725254-60a8-4edc-aab2-604dfb70677d", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1100061234-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "34efcd7d600f49698c6619be002d838f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b00fe87c-d828-442f-bd09-e9018c468557", "external-id": "nsx-vlan-transportzone-7", "segmentation_id": 7, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf2c021c6-db", "ovs_interfaceid": "f2c021c6-dbd4-40da-80c8-19678be6d78c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 881.515084] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.619s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 881.515745] env[63345]: DEBUG nova.compute.manager [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01] Start building networks asynchronously for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 881.518344] env[63345]: DEBUG oslo_concurrency.lockutils [None req-592b2fb5-a61e-4a1a-997e-37d0f22b977e tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.377s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 881.518770] env[63345]: DEBUG nova.objects.instance [None req-592b2fb5-a61e-4a1a-997e-37d0f22b977e tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Lazy-loading 'resources' on Instance uuid a3f34e0e-2969-406f-a086-a925549e458e {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 881.661602] env[63345]: DEBUG oslo_vmware.api [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017338, 'name': PowerOffVM_Task, 'duration_secs': 0.248009} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.661965] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 881.662366] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 881.663019] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b67d1fd2-e552-4687-86ca-27826220d066 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.671146] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 881.671526] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e9e5e1c6-1f38-4134-a64c-e7153e5920f1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.709369] env[63345]: DEBUG oslo_vmware.api [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Task: {'id': task-1017336, 'name': PowerOnVM_Task} progress is 71%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.762409] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 881.762734] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 881.762986] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Deleting the datastore file [datastore2] 3a85df04-3997-48a3-8992-f24fe997b3cc {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 881.763477] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-16bc7b4a-6107-4f68-a356-6c54a2b531aa {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.772685] env[63345]: DEBUG oslo_vmware.api [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Waiting for the task: (returnval){ [ 881.772685] env[63345]: value = "task-1017340" [ 881.772685] env[63345]: _type = "Task" [ 881.772685] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.782221] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f03dc436-4cdc-4f95-af05-907d03ccf506 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Acquiring lock "14198777-9091-4c69-8928-c83135acc7d2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 881.782529] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f03dc436-4cdc-4f95-af05-907d03ccf506 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Lock "14198777-9091-4c69-8928-c83135acc7d2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 881.782788] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f03dc436-4cdc-4f95-af05-907d03ccf506 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Acquiring lock "14198777-9091-4c69-8928-c83135acc7d2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 881.783027] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f03dc436-4cdc-4f95-af05-907d03ccf506 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Lock "14198777-9091-4c69-8928-c83135acc7d2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 881.783555] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f03dc436-4cdc-4f95-af05-907d03ccf506 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Lock "14198777-9091-4c69-8928-c83135acc7d2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 881.785111] env[63345]: DEBUG oslo_vmware.api [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017340, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.790017] env[63345]: INFO nova.compute.manager [None req-f03dc436-4cdc-4f95-af05-907d03ccf506 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: 14198777-9091-4c69-8928-c83135acc7d2] Terminating instance [ 881.801461] env[63345]: DEBUG oslo_vmware.api [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017337, 'name': Destroy_Task, 'duration_secs': 0.757267} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.805019] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Destroyed the VM [ 881.805019] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Deleting Snapshot of the VM instance {{(pid=63345) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 881.805019] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-f744e8b6-e87b-417f-ac18-14ffbd8fce8c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.812229] env[63345]: DEBUG oslo_vmware.api [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Waiting for the task: (returnval){ [ 881.812229] env[63345]: value = "task-1017341" [ 881.812229] env[63345]: _type = "Task" [ 881.812229] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.824493] env[63345]: DEBUG oslo_vmware.api [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017341, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.015762] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Releasing lock "refresh_cache-a415d4f2-abc7-4553-8442-312316e686b2" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 882.016135] env[63345]: DEBUG nova.compute.manager [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: a415d4f2-abc7-4553-8442-312316e686b2] Instance network_info: |[{"id": "f2c021c6-dbd4-40da-80c8-19678be6d78c", "address": "fa:16:3e:ae:e4:3b", "network": {"id": "dc725254-60a8-4edc-aab2-604dfb70677d", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1100061234-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "34efcd7d600f49698c6619be002d838f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b00fe87c-d828-442f-bd09-e9018c468557", "external-id": "nsx-vlan-transportzone-7", "segmentation_id": 7, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf2c021c6-db", "ovs_interfaceid": "f2c021c6-dbd4-40da-80c8-19678be6d78c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 882.016609] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: a415d4f2-abc7-4553-8442-312316e686b2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ae:e4:3b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b00fe87c-d828-442f-bd09-e9018c468557', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f2c021c6-dbd4-40da-80c8-19678be6d78c', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 882.026094] env[63345]: DEBUG oslo.service.loopingcall [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 882.026815] env[63345]: DEBUG nova.compute.utils [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 882.033113] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a415d4f2-abc7-4553-8442-312316e686b2] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 882.033113] env[63345]: DEBUG nova.compute.manager [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01] Allocating IP information in the background. {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 882.033113] env[63345]: DEBUG nova.network.neutron [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 882.033429] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-902aa823-0d88-466c-a8e7-8b54eea448d5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.059629] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 882.059629] env[63345]: value = "task-1017342" [ 882.059629] env[63345]: _type = "Task" [ 882.059629] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.069429] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017342, 'name': CreateVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.123677] env[63345]: DEBUG nova.policy [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6055500166344214a404427722503338', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dfc1248fb5ee4f798b6c59154d4cf623', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 882.210558] env[63345]: DEBUG oslo_vmware.api [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Task: {'id': task-1017336, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.211832] env[63345]: DEBUG nova.compute.manager [req-208574e6-8e44-4f43-aae8-64d8a5df28cf req-058a23f3-f00b-4f91-9b98-026a01b05398 service nova] [instance: a415d4f2-abc7-4553-8442-312316e686b2] Received event network-changed-f2c021c6-dbd4-40da-80c8-19678be6d78c {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 882.212037] env[63345]: DEBUG nova.compute.manager [req-208574e6-8e44-4f43-aae8-64d8a5df28cf req-058a23f3-f00b-4f91-9b98-026a01b05398 service nova] [instance: a415d4f2-abc7-4553-8442-312316e686b2] Refreshing instance network info cache due to event network-changed-f2c021c6-dbd4-40da-80c8-19678be6d78c. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 882.215062] env[63345]: DEBUG oslo_concurrency.lockutils [req-208574e6-8e44-4f43-aae8-64d8a5df28cf req-058a23f3-f00b-4f91-9b98-026a01b05398 service nova] Acquiring lock "refresh_cache-a415d4f2-abc7-4553-8442-312316e686b2" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 882.215062] env[63345]: DEBUG oslo_concurrency.lockutils [req-208574e6-8e44-4f43-aae8-64d8a5df28cf req-058a23f3-f00b-4f91-9b98-026a01b05398 service nova] Acquired lock "refresh_cache-a415d4f2-abc7-4553-8442-312316e686b2" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 882.215062] env[63345]: DEBUG nova.network.neutron [req-208574e6-8e44-4f43-aae8-64d8a5df28cf req-058a23f3-f00b-4f91-9b98-026a01b05398 service nova] [instance: a415d4f2-abc7-4553-8442-312316e686b2] Refreshing network info cache for port f2c021c6-dbd4-40da-80c8-19678be6d78c {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 882.283073] env[63345]: DEBUG oslo_vmware.api [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017340, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.173869} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.285696] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 882.285923] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 882.286131] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 882.293850] env[63345]: DEBUG nova.compute.manager [None req-f03dc436-4cdc-4f95-af05-907d03ccf506 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: 14198777-9091-4c69-8928-c83135acc7d2] Start destroying the instance on the hypervisor. {{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 882.295109] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-f03dc436-4cdc-4f95-af05-907d03ccf506 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: 14198777-9091-4c69-8928-c83135acc7d2] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 882.295109] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76a2041a-7900-4a09-9e6e-2efcbd97d554 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.303711] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-f03dc436-4cdc-4f95-af05-907d03ccf506 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: 14198777-9091-4c69-8928-c83135acc7d2] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 882.306748] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d472ce5f-d2d3-43f2-b891-d4aee30e88b4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.319622] env[63345]: DEBUG oslo_vmware.api [None req-f03dc436-4cdc-4f95-af05-907d03ccf506 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Waiting for the task: (returnval){ [ 882.319622] env[63345]: value = "task-1017343" [ 882.319622] env[63345]: _type = "Task" [ 882.319622] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.326342] env[63345]: DEBUG oslo_vmware.api [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017341, 'name': RemoveSnapshot_Task, 'duration_secs': 0.348462} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.327263] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Deleted Snapshot of the VM instance {{(pid=63345) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 882.327508] env[63345]: DEBUG nova.compute.manager [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 882.328645] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fbf8c0c-5bd4-4f0a-ae6b-4736d47979ab {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.339996] env[63345]: DEBUG oslo_vmware.api [None req-f03dc436-4cdc-4f95-af05-907d03ccf506 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Task: {'id': task-1017343, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.475020] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-660fff78-8480-4f6d-a4c2-54ef440e17c9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.490498] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b21eb1c1-54fc-430e-b356-0fe323cdcc97 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.540825] env[63345]: DEBUG nova.compute.manager [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01] Start building block device mappings for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 882.541804] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68c8a879-377b-4653-9ccd-de4738dccbae {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.551977] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6de9b30c-fff9-4d4f-9b9c-71a612b05bcb {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.567419] env[63345]: DEBUG nova.compute.provider_tree [None req-592b2fb5-a61e-4a1a-997e-37d0f22b977e tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 882.577799] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017342, 'name': CreateVM_Task, 'duration_secs': 0.473352} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.578084] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a415d4f2-abc7-4553-8442-312316e686b2] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 882.578842] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 882.579054] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 882.579416] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 882.579798] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-62e85937-9e8f-4d98-ab43-58ddab7117b8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.586023] env[63345]: DEBUG oslo_vmware.api [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Waiting for the task: (returnval){ [ 882.586023] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52eb237c-025e-66ac-58d3-a65cab97aaff" [ 882.586023] env[63345]: _type = "Task" [ 882.586023] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.597020] env[63345]: DEBUG oslo_vmware.api [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52eb237c-025e-66ac-58d3-a65cab97aaff, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.640539] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquiring lock "4868a0a0-ca35-44b0-a90c-124aa366af76" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 882.640776] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lock "4868a0a0-ca35-44b0-a90c-124aa366af76" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 882.709191] env[63345]: DEBUG oslo_vmware.api [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Task: {'id': task-1017336, 'name': PowerOnVM_Task, 'duration_secs': 1.579711} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.709521] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] [instance: ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 882.709740] env[63345]: INFO nova.compute.manager [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] [instance: ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6] Took 7.39 seconds to spawn the instance on the hypervisor. [ 882.709925] env[63345]: DEBUG nova.compute.manager [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] [instance: ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 882.710726] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-801463fd-8b72-48ac-8650-3668a343e112 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.725019] env[63345]: DEBUG nova.network.neutron [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01] Successfully created port: a8449910-e73e-4fd5-a8c3-8833ab272413 {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 882.833743] env[63345]: DEBUG oslo_vmware.api [None req-f03dc436-4cdc-4f95-af05-907d03ccf506 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Task: {'id': task-1017343, 'name': PowerOffVM_Task, 'duration_secs': 0.235915} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.834031] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-f03dc436-4cdc-4f95-af05-907d03ccf506 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: 14198777-9091-4c69-8928-c83135acc7d2] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 882.834212] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-f03dc436-4cdc-4f95-af05-907d03ccf506 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: 14198777-9091-4c69-8928-c83135acc7d2] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 882.834458] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7773e12a-442a-4058-9d87-5ec7f024e92c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.849581] env[63345]: INFO nova.compute.manager [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Shelve offloading [ 882.916169] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-f03dc436-4cdc-4f95-af05-907d03ccf506 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: 14198777-9091-4c69-8928-c83135acc7d2] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 882.916413] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-f03dc436-4cdc-4f95-af05-907d03ccf506 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: 14198777-9091-4c69-8928-c83135acc7d2] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 882.916715] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-f03dc436-4cdc-4f95-af05-907d03ccf506 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Deleting the datastore file [datastore2] 14198777-9091-4c69-8928-c83135acc7d2 {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 882.916867] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-830aa6cc-ea29-444f-97e2-cf2a9d9bdcab {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.924092] env[63345]: DEBUG oslo_vmware.api [None req-f03dc436-4cdc-4f95-af05-907d03ccf506 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Waiting for the task: (returnval){ [ 882.924092] env[63345]: value = "task-1017345" [ 882.924092] env[63345]: _type = "Task" [ 882.924092] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.934334] env[63345]: DEBUG oslo_vmware.api [None req-f03dc436-4cdc-4f95-af05-907d03ccf506 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Task: {'id': task-1017345, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.945501] env[63345]: DEBUG nova.network.neutron [req-208574e6-8e44-4f43-aae8-64d8a5df28cf req-058a23f3-f00b-4f91-9b98-026a01b05398 service nova] [instance: a415d4f2-abc7-4553-8442-312316e686b2] Updated VIF entry in instance network info cache for port f2c021c6-dbd4-40da-80c8-19678be6d78c. {{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 882.945961] env[63345]: DEBUG nova.network.neutron [req-208574e6-8e44-4f43-aae8-64d8a5df28cf req-058a23f3-f00b-4f91-9b98-026a01b05398 service nova] [instance: a415d4f2-abc7-4553-8442-312316e686b2] Updating instance_info_cache with network_info: [{"id": "f2c021c6-dbd4-40da-80c8-19678be6d78c", "address": "fa:16:3e:ae:e4:3b", "network": {"id": "dc725254-60a8-4edc-aab2-604dfb70677d", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1100061234-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "34efcd7d600f49698c6619be002d838f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b00fe87c-d828-442f-bd09-e9018c468557", "external-id": "nsx-vlan-transportzone-7", "segmentation_id": 7, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf2c021c6-db", "ovs_interfaceid": "f2c021c6-dbd4-40da-80c8-19678be6d78c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 883.073297] env[63345]: DEBUG nova.scheduler.client.report [None req-592b2fb5-a61e-4a1a-997e-37d0f22b977e tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 883.098850] env[63345]: DEBUG oslo_vmware.api [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52eb237c-025e-66ac-58d3-a65cab97aaff, 'name': SearchDatastore_Task, 'duration_secs': 0.011608} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.099182] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 883.099441] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: a415d4f2-abc7-4553-8442-312316e686b2] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 883.099687] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 883.099840] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.100036] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 883.100300] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d2eb205a-df56-4fc1-93c8-be76ca7a172e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.110524] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 883.110817] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 883.111812] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-df9ede6a-66df-4085-8f0b-9f72899cf322 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.118553] env[63345]: DEBUG oslo_vmware.api [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Waiting for the task: (returnval){ [ 883.118553] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52c98628-c75b-63ae-57a2-baa10b12ed59" [ 883.118553] env[63345]: _type = "Task" [ 883.118553] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.129245] env[63345]: DEBUG oslo_vmware.api [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52c98628-c75b-63ae-57a2-baa10b12ed59, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.144808] env[63345]: DEBUG nova.compute.manager [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 883.237208] env[63345]: INFO nova.compute.manager [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] [instance: ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6] Took 35.52 seconds to build instance. 
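The "Invoking <Something>_Task" / "Waiting for the task" / "_poll_task ... progress" / "completed successfully" records that recur throughout this trace are the standard oslo.vmware task-wait pattern used by the vmwareapi driver. The snippet below is only a minimal sketch of that pattern against oslo.vmware's public session API, not the Nova driver code itself; the vCenter host, credentials, poll interval and the "vm-12345" moref are illustrative placeholders rather than values taken from this log, and parameter names are assumed to match current oslo.vmware releases.

    # Minimal sketch of the task-wait pattern seen in the records above
    # (all connection values and the moref id are placeholders).
    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    session = vmware_api.VMwareAPISession(
        'vcenter.example.org',            # placeholder vCenter host
        'administrator@vsphere.local',    # placeholder user
        'secret',                         # placeholder password
        api_retry_count=10,
        task_poll_interval=0.5)           # cadence behind the _poll_task records

    # A managed object reference would normally come from a PropertyCollector
    # query; here it is built directly from a placeholder moref id.
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

    # Start a vCenter task ("Invoking VirtualMachine.PowerOffVM_Task ...") and
    # block while oslo.vmware polls it, logging progress until it completes.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)

The same wait_for_task() call is what produces the "progress is N%" and "completed successfully" lines for Destroy_Task, CreateVM_Task, DeleteDatastoreFile_Task and the other task types in this section.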
[ 883.324744] env[63345]: DEBUG nova.virt.hardware [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 883.325063] env[63345]: DEBUG nova.virt.hardware [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 883.325370] env[63345]: DEBUG nova.virt.hardware [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 883.325589] env[63345]: DEBUG nova.virt.hardware [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 883.325589] env[63345]: DEBUG nova.virt.hardware [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 883.325866] env[63345]: DEBUG nova.virt.hardware [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 883.325996] env[63345]: DEBUG nova.virt.hardware [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 883.326103] env[63345]: DEBUG nova.virt.hardware [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 883.326289] env[63345]: DEBUG nova.virt.hardware [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 
tempest-ServersAdminTestJSON-620918024-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 883.326522] env[63345]: DEBUG nova.virt.hardware [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 883.326666] env[63345]: DEBUG nova.virt.hardware [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 883.327631] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ece21876-460f-4427-bb9a-2f8850bd0a85 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.337599] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5241b40c-c22f-45c7-a5e7-b9d8f1ac4051 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.353338] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 883.353975] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cf:c7:9e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bd3c6b64-aba2-4bdc-a693-3b4dff3ed861', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a44ad561-3547-45fd-a941-c72ff5211989', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 883.361651] env[63345]: DEBUG oslo.service.loopingcall [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 883.361798] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-19fa3c3f-742d-4f22-a370-c9253ea177f6 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.363664] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 883.363946] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3c566b57-0cd3-4fd7-b96f-9fd95fdae54e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.386541] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 883.386541] env[63345]: value = "task-1017347" [ 883.386541] env[63345]: _type = "Task" [ 883.386541] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.390892] env[63345]: DEBUG oslo_vmware.api [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Waiting for the task: (returnval){ [ 883.390892] env[63345]: value = "task-1017346" [ 883.390892] env[63345]: _type = "Task" [ 883.390892] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.397425] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017347, 'name': CreateVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.402944] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] VM already powered off {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 883.403228] env[63345]: DEBUG nova.compute.manager [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 883.404078] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41e761b9-7671-422e-83f7-87fcce258d71 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.409828] env[63345]: DEBUG oslo_concurrency.lockutils [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Acquiring lock "refresh_cache-9aa651b8-317d-4153-8c33-9df0a5d16115" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 883.410008] env[63345]: DEBUG oslo_concurrency.lockutils [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Acquired lock "refresh_cache-9aa651b8-317d-4153-8c33-9df0a5d16115" {{(pid=63345) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.410192] env[63345]: DEBUG nova.network.neutron [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 883.433953] env[63345]: DEBUG oslo_vmware.api [None req-f03dc436-4cdc-4f95-af05-907d03ccf506 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Task: {'id': task-1017345, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.135425} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.434228] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-f03dc436-4cdc-4f95-af05-907d03ccf506 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 883.434416] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-f03dc436-4cdc-4f95-af05-907d03ccf506 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: 14198777-9091-4c69-8928-c83135acc7d2] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 883.434597] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-f03dc436-4cdc-4f95-af05-907d03ccf506 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: 14198777-9091-4c69-8928-c83135acc7d2] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 883.434773] env[63345]: INFO nova.compute.manager [None req-f03dc436-4cdc-4f95-af05-907d03ccf506 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: 14198777-9091-4c69-8928-c83135acc7d2] Took 1.14 seconds to destroy the instance on the hypervisor. [ 883.435009] env[63345]: DEBUG oslo.service.loopingcall [None req-f03dc436-4cdc-4f95-af05-907d03ccf506 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 883.435207] env[63345]: DEBUG nova.compute.manager [-] [instance: 14198777-9091-4c69-8928-c83135acc7d2] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 883.435300] env[63345]: DEBUG nova.network.neutron [-] [instance: 14198777-9091-4c69-8928-c83135acc7d2] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 883.448898] env[63345]: DEBUG oslo_concurrency.lockutils [req-208574e6-8e44-4f43-aae8-64d8a5df28cf req-058a23f3-f00b-4f91-9b98-026a01b05398 service nova] Releasing lock "refresh_cache-a415d4f2-abc7-4553-8442-312316e686b2" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 883.554387] env[63345]: DEBUG nova.compute.manager [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01] Start spawning the instance on the hypervisor. {{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 883.578590] env[63345]: DEBUG oslo_concurrency.lockutils [None req-592b2fb5-a61e-4a1a-997e-37d0f22b977e tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.060s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 883.583240] env[63345]: DEBUG nova.virt.hardware [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 883.583518] env[63345]: DEBUG nova.virt.hardware [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 883.583705] env[63345]: DEBUG nova.virt.hardware [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 883.583901] env[63345]: DEBUG nova.virt.hardware [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Flavor pref 0:0:0 {{(pid=63345) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 883.584063] env[63345]: DEBUG nova.virt.hardware [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 883.584219] env[63345]: DEBUG nova.virt.hardware [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 883.584446] env[63345]: DEBUG nova.virt.hardware [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 883.584609] env[63345]: DEBUG nova.virt.hardware [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 883.584781] env[63345]: DEBUG nova.virt.hardware [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 883.584963] env[63345]: DEBUG nova.virt.hardware [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 883.585199] env[63345]: DEBUG nova.virt.hardware [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 883.585848] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5a4b2e2b-02a6-47e3-9c0b-e7b4be448d85 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.493s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 883.586118] env[63345]: DEBUG nova.objects.instance [None req-5a4b2e2b-02a6-47e3-9c0b-e7b4be448d85 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Lazy-loading 'resources' on Instance uuid 6cbe136b-5bf6-4f17-bcef-b712d850615f {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 883.588233] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85dcf215-e958-4fb9-a304-8d54636ce12f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.597252] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-7e2d41e0-83ec-409a-9ddc-cd0a81a82daa {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.614179] env[63345]: INFO nova.scheduler.client.report [None req-592b2fb5-a61e-4a1a-997e-37d0f22b977e tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Deleted allocations for instance a3f34e0e-2969-406f-a086-a925549e458e [ 883.633214] env[63345]: DEBUG oslo_vmware.api [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52c98628-c75b-63ae-57a2-baa10b12ed59, 'name': SearchDatastore_Task, 'duration_secs': 0.009348} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.633360] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e9696ac3-8526-44c2-b485-98ddb3ad3c15 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.635916] env[63345]: INFO nova.compute.manager [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] [instance: ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6] Rebuilding instance [ 883.644539] env[63345]: DEBUG oslo_vmware.api [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Waiting for the task: (returnval){ [ 883.644539] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]529941d0-06f2-831b-4982-99f389a96195" [ 883.644539] env[63345]: _type = "Task" [ 883.644539] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.658989] env[63345]: DEBUG oslo_vmware.api [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]529941d0-06f2-831b-4982-99f389a96195, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.685706] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 883.692755] env[63345]: DEBUG nova.compute.manager [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] [instance: ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 883.692755] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1558189-6d74-4f85-ad33-2961e09c9a61 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.717449] env[63345]: DEBUG nova.compute.manager [req-d6ae08ce-3e84-45a9-b9e7-9ce58968d9ff req-9cba3230-5a9a-4888-9ec2-3a2d4d5bbad5 service nova] [instance: 14198777-9091-4c69-8928-c83135acc7d2] Received event network-vif-deleted-5a1f46e9-1557-425d-9dc3-c11b0fcf3f0d {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 883.717631] env[63345]: INFO nova.compute.manager [req-d6ae08ce-3e84-45a9-b9e7-9ce58968d9ff req-9cba3230-5a9a-4888-9ec2-3a2d4d5bbad5 service nova] [instance: 14198777-9091-4c69-8928-c83135acc7d2] Neutron deleted interface 5a1f46e9-1557-425d-9dc3-c11b0fcf3f0d; detaching it from the instance and deleting it from the info cache [ 883.717741] env[63345]: DEBUG nova.network.neutron [req-d6ae08ce-3e84-45a9-b9e7-9ce58968d9ff req-9cba3230-5a9a-4888-9ec2-3a2d4d5bbad5 service nova] [instance: 14198777-9091-4c69-8928-c83135acc7d2] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 883.740301] env[63345]: DEBUG oslo_concurrency.lockutils [None req-cf151d24-85c5-40ae-9574-ef2fd3dae985 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Lock "ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.034s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 883.897481] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017347, 'name': CreateVM_Task, 'duration_secs': 0.32975} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.897595] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 883.898301] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 883.898488] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.898815] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 883.899083] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-88c6ea47-2a13-4322-906a-5a5673b183f5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.904128] env[63345]: DEBUG oslo_vmware.api [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Waiting for the task: (returnval){ [ 883.904128] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]524b4323-f0a9-e198-a757-98f7749b763e" [ 883.904128] env[63345]: _type = "Task" [ 883.904128] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.915745] env[63345]: DEBUG oslo_vmware.api [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]524b4323-f0a9-e198-a757-98f7749b763e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.123030] env[63345]: DEBUG nova.network.neutron [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Updating instance_info_cache with network_info: [{"id": "025d1e18-19a3-43ce-9db9-1590137a5544", "address": "fa:16:3e:9b:36:a9", "network": {"id": "95d95c9b-b21c-4ee5-ab54-d0bf2699d38e", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-88421441-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba08f64c26d245a8b8f2b52ea97c2f1a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7043ca7a-807c-4c7b-b646-23ffece188b2", "external-id": "nsx-vlan-transportzone-619", "segmentation_id": 619, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap025d1e18-19", "ovs_interfaceid": "025d1e18-19a3-43ce-9db9-1590137a5544", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 884.124293] env[63345]: DEBUG oslo_concurrency.lockutils [None req-592b2fb5-a61e-4a1a-997e-37d0f22b977e tempest-ServerAddressesNegativeTestJSON-1416275848 tempest-ServerAddressesNegativeTestJSON-1416275848-project-member] Lock "a3f34e0e-2969-406f-a086-a925549e458e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.085s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 884.156316] env[63345]: DEBUG oslo_vmware.api [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]529941d0-06f2-831b-4982-99f389a96195, 'name': SearchDatastore_Task, 'duration_secs': 0.017967} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.158635] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 884.158911] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] a415d4f2-abc7-4553-8442-312316e686b2/a415d4f2-abc7-4553-8442-312316e686b2.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 884.159784] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-894709f2-bf41-435d-8fff-42367395b4d6 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.166748] env[63345]: DEBUG oslo_vmware.api [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Waiting for the task: (returnval){ [ 884.166748] env[63345]: value = "task-1017348" [ 884.166748] env[63345]: _type = "Task" [ 884.166748] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.177666] env[63345]: DEBUG oslo_vmware.api [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': task-1017348, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.195590] env[63345]: DEBUG nova.network.neutron [-] [instance: 14198777-9091-4c69-8928-c83135acc7d2] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 884.222652] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4fbc2daf-0ad6-4226-b583-768ee7ea8c25 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.233213] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c29c24e-23fa-4b8e-8824-bc5d44eac62d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.268571] env[63345]: DEBUG nova.compute.manager [req-d6ae08ce-3e84-45a9-b9e7-9ce58968d9ff req-9cba3230-5a9a-4888-9ec2-3a2d4d5bbad5 service nova] [instance: 14198777-9091-4c69-8928-c83135acc7d2] Detach interface failed, port_id=5a1f46e9-1557-425d-9dc3-c11b0fcf3f0d, reason: Instance 14198777-9091-4c69-8928-c83135acc7d2 could not be found. 
{{(pid=63345) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 884.423029] env[63345]: DEBUG oslo_vmware.api [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]524b4323-f0a9-e198-a757-98f7749b763e, 'name': SearchDatastore_Task, 'duration_secs': 0.015372} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.423029] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 884.423029] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 884.423029] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 884.423029] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 884.423029] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 884.423029] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b728d741-7a72-43e0-8fb0-0befb1f1b4c8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.440938] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 884.441194] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 884.442031] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0cc5d27d-a80d-45fd-aead-bd8880b75c49 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.451524] env[63345]: DEBUG oslo_vmware.api [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Waiting for the task: (returnval){ [ 884.451524] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]5281162a-ed72-e544-ee17-6efc15c72865" [ 884.451524] env[63345]: _type = "Task" [ 884.451524] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.461978] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4cacead-8b8a-4f54-b54b-5168082ae464 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.467844] env[63345]: DEBUG oslo_vmware.api [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5281162a-ed72-e544-ee17-6efc15c72865, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.475166] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa2c3d65-d27c-475d-b849-68b37ac5825b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.511073] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5cbdbcf-e705-43d9-8aa9-6d3b819383aa {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.520879] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89823f0e-3d94-46ec-8566-a68f4d3d5250 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.537895] env[63345]: DEBUG nova.compute.provider_tree [None req-5a4b2e2b-02a6-47e3-9c0b-e7b4be448d85 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 884.609570] env[63345]: DEBUG nova.compute.manager [req-d92821a4-b4d0-4808-b971-e6ed9e41efb6 req-0a2bc8ec-dfb2-4235-b04b-875dd4c2f603 service nova] [instance: 0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01] Received event network-vif-plugged-a8449910-e73e-4fd5-a8c3-8833ab272413 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 884.609928] env[63345]: DEBUG oslo_concurrency.lockutils [req-d92821a4-b4d0-4808-b971-e6ed9e41efb6 req-0a2bc8ec-dfb2-4235-b04b-875dd4c2f603 service nova] Acquiring lock "0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 884.610030] env[63345]: 
DEBUG oslo_concurrency.lockutils [req-d92821a4-b4d0-4808-b971-e6ed9e41efb6 req-0a2bc8ec-dfb2-4235-b04b-875dd4c2f603 service nova] Lock "0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 884.610215] env[63345]: DEBUG oslo_concurrency.lockutils [req-d92821a4-b4d0-4808-b971-e6ed9e41efb6 req-0a2bc8ec-dfb2-4235-b04b-875dd4c2f603 service nova] Lock "0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 884.610393] env[63345]: DEBUG nova.compute.manager [req-d92821a4-b4d0-4808-b971-e6ed9e41efb6 req-0a2bc8ec-dfb2-4235-b04b-875dd4c2f603 service nova] [instance: 0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01] No waiting events found dispatching network-vif-plugged-a8449910-e73e-4fd5-a8c3-8833ab272413 {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 884.610589] env[63345]: WARNING nova.compute.manager [req-d92821a4-b4d0-4808-b971-e6ed9e41efb6 req-0a2bc8ec-dfb2-4235-b04b-875dd4c2f603 service nova] [instance: 0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01] Received unexpected event network-vif-plugged-a8449910-e73e-4fd5-a8c3-8833ab272413 for instance with vm_state building and task_state spawning. [ 884.626030] env[63345]: DEBUG oslo_concurrency.lockutils [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Releasing lock "refresh_cache-9aa651b8-317d-4153-8c33-9df0a5d16115" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 884.661044] env[63345]: DEBUG nova.network.neutron [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01] Successfully updated port: a8449910-e73e-4fd5-a8c3-8833ab272413 {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 884.681366] env[63345]: DEBUG oslo_vmware.api [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': task-1017348, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.463857} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.681366] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] a415d4f2-abc7-4553-8442-312316e686b2/a415d4f2-abc7-4553-8442-312316e686b2.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 884.681366] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: a415d4f2-abc7-4553-8442-312316e686b2] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 884.681366] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f2b5037f-20c0-401c-aa55-9a86b93d8ada {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.690690] env[63345]: DEBUG oslo_vmware.api [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Waiting for the task: (returnval){ [ 884.690690] env[63345]: value = "task-1017349" [ 884.690690] env[63345]: _type = "Task" [ 884.690690] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.700367] env[63345]: INFO nova.compute.manager [-] [instance: 14198777-9091-4c69-8928-c83135acc7d2] Took 1.26 seconds to deallocate network for instance. [ 884.700767] env[63345]: DEBUG oslo_vmware.api [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': task-1017349, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.705912] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] [instance: ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 884.706207] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ec415ef6-50ed-482e-ad81-d886e56d90ec {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.717602] env[63345]: DEBUG oslo_vmware.api [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Waiting for the task: (returnval){ [ 884.717602] env[63345]: value = "task-1017350" [ 884.717602] env[63345]: _type = "Task" [ 884.717602] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.729989] env[63345]: DEBUG oslo_vmware.api [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Task: {'id': task-1017350, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.969919] env[63345]: DEBUG oslo_vmware.api [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5281162a-ed72-e544-ee17-6efc15c72865, 'name': SearchDatastore_Task, 'duration_secs': 0.052298} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.970849] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-86f175d9-353f-4829-9a57-50b7bcae606f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.977278] env[63345]: DEBUG oslo_vmware.api [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Waiting for the task: (returnval){ [ 884.977278] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52d3b82e-8685-3ca8-6ead-e7c06b97911d" [ 884.977278] env[63345]: _type = "Task" [ 884.977278] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.984461] env[63345]: DEBUG oslo_vmware.api [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52d3b82e-8685-3ca8-6ead-e7c06b97911d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.044648] env[63345]: DEBUG nova.scheduler.client.report [None req-5a4b2e2b-02a6-47e3-9c0b-e7b4be448d85 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 885.164142] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Acquiring lock "refresh_cache-0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 885.164262] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Acquired lock "refresh_cache-0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 885.164501] env[63345]: DEBUG nova.network.neutron [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 885.200628] env[63345]: DEBUG oslo_vmware.api [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': task-1017349, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06488} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.200898] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: a415d4f2-abc7-4553-8442-312316e686b2] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 885.201728] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-815d7b5d-990b-46d0-87ce-049c01bf001d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.204866] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 885.205573] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b82f51ca-d756-4707-9d78-35268b481f71 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.221645] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f03dc436-4cdc-4f95-af05-907d03ccf506 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 885.221987] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 885.230867] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: a415d4f2-abc7-4553-8442-312316e686b2] Reconfiguring VM instance instance-00000051 to attach disk [datastore2] a415d4f2-abc7-4553-8442-312316e686b2/a415d4f2-abc7-4553-8442-312316e686b2.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 885.231399] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f9f3e774-cc0f-4fe4-9df3-377b5a72e73c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.235188] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5ecaee65-8a7c-4614-99b5-32d1eaf8b897 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.255243] env[63345]: DEBUG oslo_vmware.api [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Task: {'id': task-1017350, 'name': PowerOffVM_Task, 'duration_secs': 0.149904} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.256451] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] [instance: ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 885.256696] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] [instance: ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 885.257026] env[63345]: DEBUG oslo_vmware.api [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Waiting for the task: (returnval){ [ 885.257026] env[63345]: value = "task-1017352" [ 885.257026] env[63345]: _type = "Task" [ 885.257026] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.257753] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-feea10bc-0deb-46af-b666-d6ce140f0fb3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.267019] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] [instance: ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 885.269924] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-88d86a43-90b1-4faf-b3b8-41e04ec69beb {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.271254] env[63345]: DEBUG oslo_vmware.api [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': task-1017352, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.298938] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] [instance: ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 885.299213] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] [instance: ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6] Deleting contents of the VM from datastore datastore1 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 885.299482] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Deleting the datastore file [datastore1] ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6 {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 885.299804] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-19bc003e-8748-4c61-8f3e-3be44f59a1f7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.308279] env[63345]: DEBUG oslo_vmware.api [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Waiting for the task: (returnval){ [ 885.308279] env[63345]: value = "task-1017354" [ 885.308279] env[63345]: _type = "Task" [ 885.308279] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.318439] env[63345]: DEBUG oslo_vmware.api [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Task: {'id': task-1017354, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.330709] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 885.331043] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 885.331313] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Deleting the datastore file [datastore2] 9aa651b8-317d-4153-8c33-9df0a5d16115 {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 885.331692] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-174f321d-3f28-4fcb-b245-123a457067e3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.340716] env[63345]: DEBUG oslo_vmware.api [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Waiting for the task: (returnval){ [ 885.340716] env[63345]: value = "task-1017355" [ 885.340716] env[63345]: _type = "Task" [ 885.340716] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.350084] env[63345]: DEBUG oslo_vmware.api [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017355, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.489280] env[63345]: DEBUG oslo_vmware.api [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52d3b82e-8685-3ca8-6ead-e7c06b97911d, 'name': SearchDatastore_Task, 'duration_secs': 0.009554} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.489620] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 885.489932] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 3a85df04-3997-48a3-8992-f24fe997b3cc/3a85df04-3997-48a3-8992-f24fe997b3cc.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 885.490299] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5a1731d9-5453-424b-8b4c-51d5fdfb7cb0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.499542] env[63345]: DEBUG oslo_vmware.api [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Waiting for the task: (returnval){ [ 885.499542] env[63345]: value = "task-1017356" [ 885.499542] env[63345]: _type = "Task" [ 885.499542] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.508534] env[63345]: DEBUG oslo_vmware.api [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017356, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.551794] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5a4b2e2b-02a6-47e3-9c0b-e7b4be448d85 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.966s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 885.554422] env[63345]: DEBUG oslo_concurrency.lockutils [None req-eef5c25f-f6a5-4c1a-9b50-9a2ecaa961a4 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.947s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 885.554661] env[63345]: DEBUG nova.objects.instance [None req-eef5c25f-f6a5-4c1a-9b50-9a2ecaa961a4 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lazy-loading 'resources' on Instance uuid 1e349d03-6cae-4322-9941-d48c52c21c0e {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 885.574954] env[63345]: INFO nova.scheduler.client.report [None req-5a4b2e2b-02a6-47e3-9c0b-e7b4be448d85 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Deleted allocations for instance 6cbe136b-5bf6-4f17-bcef-b712d850615f [ 885.712274] env[63345]: DEBUG nova.network.neutron [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 885.750946] env[63345]: DEBUG nova.compute.manager [req-a72b3132-7ea7-472e-9120-3d0070a880b5 req-5df0cc93-7c69-4f5b-8f72-cdc65e749944 service nova] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Received event network-vif-unplugged-025d1e18-19a3-43ce-9db9-1590137a5544 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 885.754127] env[63345]: DEBUG oslo_concurrency.lockutils [req-a72b3132-7ea7-472e-9120-3d0070a880b5 req-5df0cc93-7c69-4f5b-8f72-cdc65e749944 service nova] Acquiring lock "9aa651b8-317d-4153-8c33-9df0a5d16115-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 885.754127] env[63345]: DEBUG oslo_concurrency.lockutils [req-a72b3132-7ea7-472e-9120-3d0070a880b5 req-5df0cc93-7c69-4f5b-8f72-cdc65e749944 service nova] Lock "9aa651b8-317d-4153-8c33-9df0a5d16115-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 885.754127] env[63345]: DEBUG oslo_concurrency.lockutils [req-a72b3132-7ea7-472e-9120-3d0070a880b5 req-5df0cc93-7c69-4f5b-8f72-cdc65e749944 service nova] Lock "9aa651b8-317d-4153-8c33-9df0a5d16115-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 885.754127] env[63345]: DEBUG nova.compute.manager [req-a72b3132-7ea7-472e-9120-3d0070a880b5 req-5df0cc93-7c69-4f5b-8f72-cdc65e749944 service nova] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] No waiting events found dispatching network-vif-unplugged-025d1e18-19a3-43ce-9db9-1590137a5544 {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 885.754127] env[63345]: WARNING nova.compute.manager [req-a72b3132-7ea7-472e-9120-3d0070a880b5 req-5df0cc93-7c69-4f5b-8f72-cdc65e749944 service nova] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Received unexpected event network-vif-unplugged-025d1e18-19a3-43ce-9db9-1590137a5544 for instance with vm_state shelved and task_state shelving_offloading. [ 885.754127] env[63345]: DEBUG nova.compute.manager [req-a72b3132-7ea7-472e-9120-3d0070a880b5 req-5df0cc93-7c69-4f5b-8f72-cdc65e749944 service nova] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Received event network-changed-025d1e18-19a3-43ce-9db9-1590137a5544 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 885.754127] env[63345]: DEBUG nova.compute.manager [req-a72b3132-7ea7-472e-9120-3d0070a880b5 req-5df0cc93-7c69-4f5b-8f72-cdc65e749944 service nova] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Refreshing instance network info cache due to event network-changed-025d1e18-19a3-43ce-9db9-1590137a5544. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 885.754127] env[63345]: DEBUG oslo_concurrency.lockutils [req-a72b3132-7ea7-472e-9120-3d0070a880b5 req-5df0cc93-7c69-4f5b-8f72-cdc65e749944 service nova] Acquiring lock "refresh_cache-9aa651b8-317d-4153-8c33-9df0a5d16115" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 885.754127] env[63345]: DEBUG oslo_concurrency.lockutils [req-a72b3132-7ea7-472e-9120-3d0070a880b5 req-5df0cc93-7c69-4f5b-8f72-cdc65e749944 service nova] Acquired lock "refresh_cache-9aa651b8-317d-4153-8c33-9df0a5d16115" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 885.754127] env[63345]: DEBUG nova.network.neutron [req-a72b3132-7ea7-472e-9120-3d0070a880b5 req-5df0cc93-7c69-4f5b-8f72-cdc65e749944 service nova] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Refreshing network info cache for port 025d1e18-19a3-43ce-9db9-1590137a5544 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 885.773988] env[63345]: DEBUG oslo_vmware.api [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': task-1017352, 'name': ReconfigVM_Task, 'duration_secs': 0.333643} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.774348] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: a415d4f2-abc7-4553-8442-312316e686b2] Reconfigured VM instance instance-00000051 to attach disk [datastore2] a415d4f2-abc7-4553-8442-312316e686b2/a415d4f2-abc7-4553-8442-312316e686b2.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 885.775556] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-91d326c5-9e2c-405f-8bdd-1c3022be5fbb {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.788376] env[63345]: DEBUG oslo_vmware.api [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Waiting for the task: (returnval){ [ 885.788376] env[63345]: value = "task-1017357" [ 885.788376] env[63345]: _type = "Task" [ 885.788376] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.800621] env[63345]: DEBUG oslo_vmware.api [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': task-1017357, 'name': Rename_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.821563] env[63345]: DEBUG oslo_vmware.api [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Task: {'id': task-1017354, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.108236} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.821894] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 885.822148] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] [instance: ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6] Deleted contents of the VM from datastore datastore1 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 885.822394] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] [instance: ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 885.851428] env[63345]: DEBUG oslo_vmware.api [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017355, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.1696} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.851740] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 885.851946] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 885.852159] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 885.876572] env[63345]: INFO nova.scheduler.client.report [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Deleted allocations for instance 9aa651b8-317d-4153-8c33-9df0a5d16115 [ 885.961471] env[63345]: DEBUG nova.network.neutron [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01] Updating instance_info_cache with network_info: [{"id": "a8449910-e73e-4fd5-a8c3-8833ab272413", "address": "fa:16:3e:56:21:f9", "network": {"id": "6adcb593-15d5-4959-9e09-f7794e033f9e", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1117018512-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": 
"192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dfc1248fb5ee4f798b6c59154d4cf623", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "450939f7-f74b-41f7-93f7-b4fde6a6fbed", "external-id": "nsx-vlan-transportzone-866", "segmentation_id": 866, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa8449910-e7", "ovs_interfaceid": "a8449910-e73e-4fd5-a8c3-8833ab272413", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 886.010286] env[63345]: DEBUG oslo_vmware.api [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017356, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.088856] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5a4b2e2b-02a6-47e3-9c0b-e7b4be448d85 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Lock "6cbe136b-5bf6-4f17-bcef-b712d850615f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.662s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 886.303278] env[63345]: DEBUG oslo_vmware.api [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': task-1017357, 'name': Rename_Task, 'duration_secs': 0.274193} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.303787] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: a415d4f2-abc7-4553-8442-312316e686b2] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 886.304207] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9a397656-6f19-4875-a797-53ce3487cb8a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.316027] env[63345]: DEBUG oslo_vmware.api [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Waiting for the task: (returnval){ [ 886.316027] env[63345]: value = "task-1017358" [ 886.316027] env[63345]: _type = "Task" [ 886.316027] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.332196] env[63345]: DEBUG oslo_vmware.api [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': task-1017358, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.382949] env[63345]: DEBUG oslo_concurrency.lockutils [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 886.463730] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Releasing lock "refresh_cache-0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 886.464137] env[63345]: DEBUG nova.compute.manager [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01] Instance network_info: |[{"id": "a8449910-e73e-4fd5-a8c3-8833ab272413", "address": "fa:16:3e:56:21:f9", "network": {"id": "6adcb593-15d5-4959-9e09-f7794e033f9e", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1117018512-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dfc1248fb5ee4f798b6c59154d4cf623", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "450939f7-f74b-41f7-93f7-b4fde6a6fbed", "external-id": "nsx-vlan-transportzone-866", "segmentation_id": 866, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa8449910-e7", "ovs_interfaceid": "a8449910-e73e-4fd5-a8c3-8833ab272413", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 886.464581] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:56:21:f9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '450939f7-f74b-41f7-93f7-b4fde6a6fbed', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a8449910-e73e-4fd5-a8c3-8833ab272413', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 886.473237] env[63345]: DEBUG oslo.service.loopingcall [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 886.476141] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 886.476622] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0c724f42-9425-4fa6-997e-4589ad59ad2d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.493960] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-875c28ee-71c1-4435-be27-a99b8cd038dd {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.505334] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 886.505334] env[63345]: value = "task-1017359" [ 886.505334] env[63345]: _type = "Task" [ 886.505334] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.506649] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-725d8efd-5495-4261-b1d9-7b37af80fdcf {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.517236] env[63345]: DEBUG oslo_vmware.api [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017356, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.533165} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.517876] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 3a85df04-3997-48a3-8992-f24fe997b3cc/3a85df04-3997-48a3-8992-f24fe997b3cc.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 886.522174] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 886.557023] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-96ff13a5-c601-457e-8467-ac3c6858c6f1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.560405] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-392715c6-23b1-46a7-b826-17edca1b2d80 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.563451] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017359, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.571671] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ad09374-9d64-40fc-90ab-4cd6da583bcd {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.576967] env[63345]: DEBUG oslo_vmware.api [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Waiting for the task: (returnval){ [ 886.576967] env[63345]: value = "task-1017360" [ 886.576967] env[63345]: _type = "Task" [ 886.576967] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.590469] env[63345]: DEBUG nova.compute.provider_tree [None req-eef5c25f-f6a5-4c1a-9b50-9a2ecaa961a4 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 886.599696] env[63345]: DEBUG oslo_vmware.api [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017360, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.653702] env[63345]: DEBUG nova.compute.manager [req-61b4760a-575e-44c0-95a1-da77f8a7add0 req-7889d69e-f989-4164-ae85-1f329663d73e service nova] [instance: 0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01] Received event network-changed-a8449910-e73e-4fd5-a8c3-8833ab272413 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 886.653975] env[63345]: DEBUG nova.compute.manager [req-61b4760a-575e-44c0-95a1-da77f8a7add0 req-7889d69e-f989-4164-ae85-1f329663d73e service nova] [instance: 0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01] Refreshing instance network info cache due to event network-changed-a8449910-e73e-4fd5-a8c3-8833ab272413. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 886.654202] env[63345]: DEBUG oslo_concurrency.lockutils [req-61b4760a-575e-44c0-95a1-da77f8a7add0 req-7889d69e-f989-4164-ae85-1f329663d73e service nova] Acquiring lock "refresh_cache-0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 886.654482] env[63345]: DEBUG oslo_concurrency.lockutils [req-61b4760a-575e-44c0-95a1-da77f8a7add0 req-7889d69e-f989-4164-ae85-1f329663d73e service nova] Acquired lock "refresh_cache-0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 886.654697] env[63345]: DEBUG nova.network.neutron [req-61b4760a-575e-44c0-95a1-da77f8a7add0 req-7889d69e-f989-4164-ae85-1f329663d73e service nova] [instance: 0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01] Refreshing network info cache for port a8449910-e73e-4fd5-a8c3-8833ab272413 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 886.827242] env[63345]: DEBUG oslo_vmware.api [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': task-1017358, 'name': PowerOnVM_Task, 'duration_secs': 0.489614} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.827242] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: a415d4f2-abc7-4553-8442-312316e686b2] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 886.827598] env[63345]: INFO nova.compute.manager [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: a415d4f2-abc7-4553-8442-312316e686b2] Took 7.97 seconds to spawn the instance on the hypervisor. 
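The PowerOnVM_Task entries above show oslo.vmware's asynchronous task pattern: the driver invokes a vSphere *_Task method through the API session, then blocks in wait_for_task while _poll_task logs progress until the task reports success. A minimal sketch of that pattern, with a placeholder host, credentials and VM reference (not Nova's actual code path):

    from oslo_vmware import api as vmware_api

    # Placeholder connection details; in Nova these come from the [vmware] config group.
    session = vmware_api.VMwareAPISession(
        'vcenter.example.org', 'administrator', 'secret',
        api_retry_count=10,
        task_poll_interval=0.5)  # polling interval behind the "progress is N%" lines

    def power_on(vm_ref):
        # Kick off the asynchronous vSphere task; this corresponds to the
        # "Invoking VirtualMachine.PowerOnVM_Task" lines in the trace.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # Block until the task finishes; oslo.vmware polls TaskInfo and logs
        # "Task: {...} completed successfully." on success, or raises on failure.
        return session.wait_for_task(task)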
[ 886.827652] env[63345]: DEBUG nova.compute.manager [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: a415d4f2-abc7-4553-8442-312316e686b2] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 886.828466] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9315a2c-0096-4f48-9713-070e1c41c661 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.860383] env[63345]: DEBUG nova.virt.hardware [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 886.860663] env[63345]: DEBUG nova.virt.hardware [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 886.860881] env[63345]: DEBUG nova.virt.hardware [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 886.861131] env[63345]: DEBUG nova.virt.hardware [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 886.861304] env[63345]: DEBUG nova.virt.hardware [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 886.861495] env[63345]: DEBUG nova.virt.hardware [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 886.861747] env[63345]: DEBUG nova.virt.hardware [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) 
{{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 886.861933] env[63345]: DEBUG nova.virt.hardware [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 886.862233] env[63345]: DEBUG nova.virt.hardware [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 886.862388] env[63345]: DEBUG nova.virt.hardware [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 886.862525] env[63345]: DEBUG nova.virt.hardware [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 886.863444] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de92ef06-dae4-4ae6-96ad-eab78d5fb463 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.871230] env[63345]: DEBUG nova.network.neutron [req-a72b3132-7ea7-472e-9120-3d0070a880b5 req-5df0cc93-7c69-4f5b-8f72-cdc65e749944 service nova] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Updated VIF entry in instance network info cache for port 025d1e18-19a3-43ce-9db9-1590137a5544. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 886.871618] env[63345]: DEBUG nova.network.neutron [req-a72b3132-7ea7-472e-9120-3d0070a880b5 req-5df0cc93-7c69-4f5b-8f72-cdc65e749944 service nova] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Updating instance_info_cache with network_info: [{"id": "025d1e18-19a3-43ce-9db9-1590137a5544", "address": "fa:16:3e:9b:36:a9", "network": {"id": "95d95c9b-b21c-4ee5-ab54-d0bf2699d38e", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-88421441-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba08f64c26d245a8b8f2b52ea97c2f1a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap025d1e18-19", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 886.873711] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c2ba018-414e-4031-ab21-f514c5b165c2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.889889] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] [instance: ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6] Instance VIF info [] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 886.895505] env[63345]: DEBUG oslo.service.loopingcall [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 886.896724] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 886.896956] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-85c971d7-eff3-42e6-b381-8316c6caefbf {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.914387] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 886.914387] env[63345]: value = "task-1017361" [ 886.914387] env[63345]: _type = "Task" [ 886.914387] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.923096] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017361, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.024374] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017359, 'name': CreateVM_Task, 'duration_secs': 0.368352} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.024374] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 887.025308] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 887.026232] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 887.026232] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 887.026374] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c68659d7-600a-4514-a5be-2ebd176bd4c4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.032784] env[63345]: DEBUG oslo_vmware.api [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Waiting for the task: (returnval){ [ 887.032784] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52d77966-9c26-42d0-7162-25eba9fdbb5e" [ 887.032784] env[63345]: _type = "Task" [ 887.032784] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.046782] env[63345]: DEBUG oslo_vmware.api [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52d77966-9c26-42d0-7162-25eba9fdbb5e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.087176] env[63345]: DEBUG oslo_vmware.api [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017360, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065849} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.087462] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 887.088333] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acf81dfd-c8b4-4e93-8026-f9aa62148039 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.103159] env[63345]: DEBUG nova.scheduler.client.report [None req-eef5c25f-f6a5-4c1a-9b50-9a2ecaa961a4 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 887.115884] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Reconfiguring VM instance instance-00000038 to attach disk [datastore2] 3a85df04-3997-48a3-8992-f24fe997b3cc/3a85df04-3997-48a3-8992-f24fe997b3cc.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 887.116427] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-59dda8b8-2d31-4362-a573-a7d9462b68cb {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.140372] env[63345]: DEBUG oslo_vmware.api [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Waiting for the task: (returnval){ [ 887.140372] env[63345]: value = "task-1017362" [ 887.140372] env[63345]: _type = "Task" [ 887.140372] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.149798] env[63345]: DEBUG oslo_vmware.api [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017362, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.350987] env[63345]: INFO nova.compute.manager [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: a415d4f2-abc7-4553-8442-312316e686b2] Took 37.09 seconds to build instance. 
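The lock lines threaded through this trace (Acquiring lock ... / acquired by ... :: waited Ns / "released" by ... :: held Ns) are emitted by oslo.concurrency's lockutils wrappers around Nova's critical sections, such as the per-instance, "compute_resources" and "refresh_cache-<uuid>" locks. A small illustration of the two usual forms, with made-up function bodies rather than Nova's real code:

    from oslo_concurrency import lockutils

    # Decorator form: serializes callers on a named internal semaphore and produces the
    # 'acquired by ... :: waited Ns' / '"released" by ... :: held Ns' DEBUG lines above.
    @lockutils.synchronized('compute_resources', fair=True)
    def update_usage():
        pass  # placeholder body; the resource tracker's accounting would go here

    # Context-manager form, matching the 'Acquiring lock' / 'Acquired lock' /
    # 'Releasing lock' lines used for the refresh_cache-<uuid> locks.
    def refresh_cache(instance_uuid):
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            pass  # placeholder: rebuild the instance network info cache here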
[ 887.378315] env[63345]: DEBUG oslo_concurrency.lockutils [req-a72b3132-7ea7-472e-9120-3d0070a880b5 req-5df0cc93-7c69-4f5b-8f72-cdc65e749944 service nova] Releasing lock "refresh_cache-9aa651b8-317d-4153-8c33-9df0a5d16115" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 887.425730] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017361, 'name': CreateVM_Task} progress is 25%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.426792] env[63345]: DEBUG nova.network.neutron [req-61b4760a-575e-44c0-95a1-da77f8a7add0 req-7889d69e-f989-4164-ae85-1f329663d73e service nova] [instance: 0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01] Updated VIF entry in instance network info cache for port a8449910-e73e-4fd5-a8c3-8833ab272413. {{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 887.427193] env[63345]: DEBUG nova.network.neutron [req-61b4760a-575e-44c0-95a1-da77f8a7add0 req-7889d69e-f989-4164-ae85-1f329663d73e service nova] [instance: 0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01] Updating instance_info_cache with network_info: [{"id": "a8449910-e73e-4fd5-a8c3-8833ab272413", "address": "fa:16:3e:56:21:f9", "network": {"id": "6adcb593-15d5-4959-9e09-f7794e033f9e", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1117018512-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dfc1248fb5ee4f798b6c59154d4cf623", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "450939f7-f74b-41f7-93f7-b4fde6a6fbed", "external-id": "nsx-vlan-transportzone-866", "segmentation_id": 866, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa8449910-e7", "ovs_interfaceid": "a8449910-e73e-4fd5-a8c3-8833ab272413", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 887.544492] env[63345]: DEBUG oslo_vmware.api [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52d77966-9c26-42d0-7162-25eba9fdbb5e, 'name': SearchDatastore_Task, 'duration_secs': 0.012176} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.544851] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 887.545068] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 887.545315] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 887.545507] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 887.545707] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 887.545986] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9118c642-e8b5-4d69-828d-e88dad4ed54c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.555112] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 887.555305] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 887.556036] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-20d9f456-e997-4d6d-953f-dcb8ebbfe84f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.563564] env[63345]: DEBUG oslo_vmware.api [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Waiting for the task: (returnval){ [ 887.563564] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52740e0c-9f94-eafb-ec40-787411ca8260" [ 887.563564] env[63345]: _type = "Task" [ 887.563564] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.572045] env[63345]: DEBUG oslo_vmware.api [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52740e0c-9f94-eafb-ec40-787411ca8260, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.617515] env[63345]: DEBUG oslo_concurrency.lockutils [None req-eef5c25f-f6a5-4c1a-9b50-9a2ecaa961a4 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.063s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 887.619940] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a3c06927-7632-4fbb-a205-b02d5b82066d tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.100s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 887.620196] env[63345]: DEBUG nova.objects.instance [None req-a3c06927-7632-4fbb-a205-b02d5b82066d tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Lazy-loading 'resources' on Instance uuid 805f9143-a8d8-4995-a20d-3b10ef3ab599 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 887.634417] env[63345]: INFO nova.scheduler.client.report [None req-eef5c25f-f6a5-4c1a-9b50-9a2ecaa961a4 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Deleted allocations for instance 1e349d03-6cae-4322-9941-d48c52c21c0e [ 887.652348] env[63345]: DEBUG oslo_vmware.api [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017362, 'name': ReconfigVM_Task, 'duration_secs': 0.279312} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.652683] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Reconfigured VM instance instance-00000038 to attach disk [datastore2] 3a85df04-3997-48a3-8992-f24fe997b3cc/3a85df04-3997-48a3-8992-f24fe997b3cc.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 887.653451] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6fc2f848-e832-4f8e-8956-e14c8df18d57 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.662117] env[63345]: DEBUG oslo_vmware.api [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Waiting for the task: (returnval){ [ 887.662117] env[63345]: value = "task-1017363" [ 887.662117] env[63345]: _type = "Task" [ 887.662117] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.671594] env[63345]: DEBUG oslo_vmware.api [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017363, 'name': Rename_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.853626] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dd9ef99a-6c93-46d5-8893-1f179fc8dc33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Lock "a415d4f2-abc7-4553-8442-312316e686b2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.601s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 887.925624] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017361, 'name': CreateVM_Task, 'duration_secs': 0.984666} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.925843] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 887.926299] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 887.926472] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 887.926875] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 887.927147] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d0c6bc8b-ada5-4b8d-9b28-5d280c4a2610 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.929444] env[63345]: DEBUG oslo_concurrency.lockutils [req-61b4760a-575e-44c0-95a1-da77f8a7add0 req-7889d69e-f989-4164-ae85-1f329663d73e service nova] Releasing lock "refresh_cache-0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 887.933606] env[63345]: DEBUG oslo_vmware.api [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Waiting for the task: (returnval){ [ 887.933606] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52e5d4f6-d956-4b54-e900-882e8b0815fb" [ 887.933606] env[63345]: _type = "Task" [ 887.933606] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.942230] env[63345]: DEBUG oslo_vmware.api [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52e5d4f6-d956-4b54-e900-882e8b0815fb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.074413] env[63345]: DEBUG oslo_vmware.api [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52740e0c-9f94-eafb-ec40-787411ca8260, 'name': SearchDatastore_Task, 'duration_secs': 0.00911} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.075250] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8fb41ce0-eb07-44b4-bba0-8fa009310f1b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.081217] env[63345]: DEBUG oslo_vmware.api [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Waiting for the task: (returnval){ [ 888.081217] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]5224e741-f3e5-8516-a3a0-95e651bc3446" [ 888.081217] env[63345]: _type = "Task" [ 888.081217] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.089325] env[63345]: DEBUG oslo_vmware.api [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5224e741-f3e5-8516-a3a0-95e651bc3446, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.145484] env[63345]: DEBUG oslo_concurrency.lockutils [None req-eef5c25f-f6a5-4c1a-9b50-9a2ecaa961a4 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lock "1e349d03-6cae-4322-9941-d48c52c21c0e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.688s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 888.174834] env[63345]: DEBUG oslo_vmware.api [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017363, 'name': Rename_Task, 'duration_secs': 0.149969} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.175136] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 888.175390] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-31cac51b-4538-49de-9aca-4258c0b5a688 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.182563] env[63345]: DEBUG oslo_vmware.api [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Waiting for the task: (returnval){ [ 888.182563] env[63345]: value = "task-1017364" [ 888.182563] env[63345]: _type = "Task" [ 888.182563] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.192486] env[63345]: DEBUG oslo_vmware.api [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017364, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.399337] env[63345]: INFO nova.compute.manager [None req-b8b27e1a-9936-4de6-bb9e-9dc49f8dbe6d tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: a415d4f2-abc7-4553-8442-312316e686b2] Rescuing [ 888.399686] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b8b27e1a-9936-4de6-bb9e-9dc49f8dbe6d tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Acquiring lock "refresh_cache-a415d4f2-abc7-4553-8442-312316e686b2" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 888.399897] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b8b27e1a-9936-4de6-bb9e-9dc49f8dbe6d tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Acquired lock "refresh_cache-a415d4f2-abc7-4553-8442-312316e686b2" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 888.400191] env[63345]: DEBUG nova.network.neutron [None req-b8b27e1a-9936-4de6-bb9e-9dc49f8dbe6d tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: a415d4f2-abc7-4553-8442-312316e686b2] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 888.414699] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1f45d5f-1f4a-4236-877e-46d28adf4e64 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.423802] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c0adb81-c9db-4573-8a4b-437c085aedd2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.459571] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af311db7-760f-4460-8400-cb8fd764919a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.468500] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Acquiring lock "9aa651b8-317d-4153-8c33-9df0a5d16115" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 888.468872] env[63345]: DEBUG oslo_vmware.api [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52e5d4f6-d956-4b54-e900-882e8b0815fb, 'name': SearchDatastore_Task, 'duration_secs': 0.014868} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.471429] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 888.471765] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] [instance: ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 888.472011] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 888.473351] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd29c729-1dfa-4b93-b1b4-a8ecb88979b9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.487793] env[63345]: DEBUG nova.compute.provider_tree [None req-a3c06927-7632-4fbb-a205-b02d5b82066d tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 888.591978] env[63345]: DEBUG oslo_vmware.api [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5224e741-f3e5-8516-a3a0-95e651bc3446, 'name': SearchDatastore_Task, 'duration_secs': 0.009318} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.592243] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 888.592565] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01/0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 888.592878] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 888.593105] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 888.593344] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2615d251-cf97-4334-9095-694a73166543 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.595403] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-59651cac-87b5-4407-a95f-8d381bf25bad {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.603524] env[63345]: DEBUG oslo_vmware.api [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Waiting for the task: (returnval){ [ 888.603524] env[63345]: value = "task-1017365" [ 888.603524] env[63345]: _type = "Task" [ 888.603524] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.611574] env[63345]: DEBUG oslo_vmware.api [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1017365, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.628925] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 888.629142] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 888.629974] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3c8e939d-7e3e-438b-9a9c-e7f8fc54c3d5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.638852] env[63345]: DEBUG oslo_vmware.api [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Waiting for the task: (returnval){ [ 888.638852] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52250adb-faa7-bc7a-4ba8-38511ef4a780" [ 888.638852] env[63345]: _type = "Task" [ 888.638852] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.647766] env[63345]: DEBUG oslo_vmware.api [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52250adb-faa7-bc7a-4ba8-38511ef4a780, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.693707] env[63345]: DEBUG oslo_vmware.api [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017364, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.991127] env[63345]: DEBUG nova.scheduler.client.report [None req-a3c06927-7632-4fbb-a205-b02d5b82066d tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 889.114532] env[63345]: DEBUG oslo_vmware.api [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1017365, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.123286] env[63345]: DEBUG nova.network.neutron [None req-b8b27e1a-9936-4de6-bb9e-9dc49f8dbe6d tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: a415d4f2-abc7-4553-8442-312316e686b2] Updating instance_info_cache with network_info: [{"id": "f2c021c6-dbd4-40da-80c8-19678be6d78c", "address": "fa:16:3e:ae:e4:3b", "network": {"id": "dc725254-60a8-4edc-aab2-604dfb70677d", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1100061234-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "34efcd7d600f49698c6619be002d838f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b00fe87c-d828-442f-bd09-e9018c468557", "external-id": "nsx-vlan-transportzone-7", "segmentation_id": 7, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf2c021c6-db", "ovs_interfaceid": "f2c021c6-dbd4-40da-80c8-19678be6d78c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 889.150037] env[63345]: DEBUG oslo_vmware.api [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52250adb-faa7-bc7a-4ba8-38511ef4a780, 'name': SearchDatastore_Task, 'duration_secs': 0.010389} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.150855] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d473f71e-1c98-4aaa-9e1a-e7ef7aef53f1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.157775] env[63345]: DEBUG oslo_vmware.api [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Waiting for the task: (returnval){ [ 889.157775] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]527e9f71-7863-8312-e687-4ec8f4e680d0" [ 889.157775] env[63345]: _type = "Task" [ 889.157775] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.166835] env[63345]: DEBUG oslo_vmware.api [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]527e9f71-7863-8312-e687-4ec8f4e680d0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.194028] env[63345]: DEBUG oslo_vmware.api [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017364, 'name': PowerOnVM_Task, 'duration_secs': 0.529296} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.194163] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 889.194456] env[63345]: DEBUG nova.compute.manager [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 889.195330] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-844d1c2c-ec65-431b-947e-58483f74e6eb {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.500947] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a3c06927-7632-4fbb-a205-b02d5b82066d tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.881s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 889.503384] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b9882f35-89e6-4dc7-8f48-84eee80f3558 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 23.914s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 889.525648] env[63345]: INFO nova.scheduler.client.report [None req-a3c06927-7632-4fbb-a205-b02d5b82066d tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Deleted allocations for instance 805f9143-a8d8-4995-a20d-3b10ef3ab599 [ 889.578887] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquiring lock "e5546a26-3f94-48a6-914a-2c37e63a0aeb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 889.579189] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lock "e5546a26-3f94-48a6-914a-2c37e63a0aeb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 889.615710] env[63345]: DEBUG oslo_vmware.api [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1017365, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.725988} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.615990] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01/0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 889.616692] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 889.616692] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fad09e99-0855-4489-b08d-f271ecbc59dc {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.624211] env[63345]: DEBUG oslo_vmware.api [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Waiting for the task: (returnval){ [ 889.624211] env[63345]: value = "task-1017366" [ 889.624211] env[63345]: _type = "Task" [ 889.624211] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.627770] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b8b27e1a-9936-4de6-bb9e-9dc49f8dbe6d tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Releasing lock "refresh_cache-a415d4f2-abc7-4553-8442-312316e686b2" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 889.636741] env[63345]: DEBUG oslo_vmware.api [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1017366, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.670220] env[63345]: DEBUG oslo_vmware.api [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]527e9f71-7863-8312-e687-4ec8f4e680d0, 'name': SearchDatastore_Task, 'duration_secs': 0.052405} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.670505] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 889.670773] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6/ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 889.671059] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bd04d241-00b5-4c9f-9b30-4895e67ae5cd {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.679475] env[63345]: DEBUG oslo_vmware.api [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Waiting for the task: (returnval){ [ 889.679475] env[63345]: value = "task-1017367" [ 889.679475] env[63345]: _type = "Task" [ 889.679475] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.688924] env[63345]: DEBUG oslo_vmware.api [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Task: {'id': task-1017367, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.713173] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 890.037752] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a3c06927-7632-4fbb-a205-b02d5b82066d tempest-AttachInterfacesUnderV243Test-1651719437 tempest-AttachInterfacesUnderV243Test-1651719437-project-member] Lock "805f9143-a8d8-4995-a20d-3b10ef3ab599" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.357s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 890.087234] env[63345]: DEBUG nova.compute.manager [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: e5546a26-3f94-48a6-914a-2c37e63a0aeb] Starting instance... 
{{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 890.136266] env[63345]: DEBUG oslo_vmware.api [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1017366, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082481} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.139231] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 890.140980] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24f84637-faa5-4cd4-8767-c7de8a71caf5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.164518] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01] Reconfiguring VM instance instance-00000052 to attach disk [datastore2] 0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01/0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 890.171014] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-230beb48-13bc-4aa0-830e-3759b0ba67f3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.195162] env[63345]: DEBUG oslo_vmware.api [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Task: {'id': task-1017367, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.198808] env[63345]: DEBUG oslo_vmware.api [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Waiting for the task: (returnval){ [ 890.198808] env[63345]: value = "task-1017368" [ 890.198808] env[63345]: _type = "Task" [ 890.198808] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.209171] env[63345]: DEBUG oslo_vmware.api [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1017368, 'name': ReconfigVM_Task} progress is 10%. 
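The CopyVirtualDisk_Task, ExtendVirtualDisk_Task and ReconfigVM_Task records around this point all follow the same oslo.vmware pattern: the driver invokes a *_Task method through its API session, then blocks in wait_for_task(), whose polling produces the repeated "_poll_task ... progress is N%" lines (api.py:434) and the "completed successfully" lines (api.py:444). Below is a minimal sketch of that invoke-then-poll pattern, not code lifted from Nova; the vCenter address, credentials, datastore paths and the datacenter lookup are placeholders.

    # Minimal sketch of the invoke-then-poll pattern recorded in this log.
    # All connection details and paths here are placeholders, not log values.
    from oslo_vmware import api, vim_util

    session = api.VMwareAPISession('vcenter.example.org', 'user', 'secret',
                                   10, 0.5)  # api_retry_count, task_poll_interval

    # Look up a datacenter moref; CopyVirtualDisk_Task needs one for each path.
    result = session.invoke_api(vim_util, 'get_objects',
                                session.vim, 'Datacenter', 1)
    dc_ref = result.objects[0].obj

    disk_mgr = session.vim.service_content.virtualDiskManager
    copy_task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task', disk_mgr,
        sourceName='[datastore2] devstack-image-cache_base/IMG/IMG.vmdk',
        sourceDatacenter=dc_ref,
        destName='[datastore2] INSTANCE/INSTANCE.vmdk',
        destDatacenter=dc_ref)

    # Blocks, polling the task at task_poll_interval until it succeeds
    # (the "_poll_task" / "completed successfully" records) or raises on error.
    session.wait_for_task(copy_task)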
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.372437] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca65d141-f3f6-454d-9ab6-afd80be34d66 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.380836] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d694f6b2-6214-4713-ab15-81e61e74c8df {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.413672] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-442e2584-16f1-40b0-8fb1-c319ab7b99c4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.422451] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cba69ec-cc4a-46be-97a4-09b5da6a1e58 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.436586] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fd2974a3-ed20-44e6-a8e4-d684154888e5 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Acquiring lock "75fc8365-bf8d-489e-935f-a5169c6a7e62" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 890.436842] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fd2974a3-ed20-44e6-a8e4-d684154888e5 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Lock "75fc8365-bf8d-489e-935f-a5169c6a7e62" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 890.437064] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fd2974a3-ed20-44e6-a8e4-d684154888e5 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Acquiring lock "75fc8365-bf8d-489e-935f-a5169c6a7e62-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 890.437260] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fd2974a3-ed20-44e6-a8e4-d684154888e5 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Lock "75fc8365-bf8d-489e-935f-a5169c6a7e62-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 890.437440] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fd2974a3-ed20-44e6-a8e4-d684154888e5 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Lock "75fc8365-bf8d-489e-935f-a5169c6a7e62-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 890.439313] env[63345]: DEBUG nova.compute.provider_tree [None 
req-b9882f35-89e6-4dc7-8f48-84eee80f3558 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 890.440605] env[63345]: INFO nova.compute.manager [None req-fd2974a3-ed20-44e6-a8e4-d684154888e5 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 75fc8365-bf8d-489e-935f-a5169c6a7e62] Terminating instance [ 890.609697] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 890.686227] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8b27e1a-9936-4de6-bb9e-9dc49f8dbe6d tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: a415d4f2-abc7-4553-8442-312316e686b2] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 890.690394] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-572cb9c0-5d01-4cfc-8257-45692d6fd2a5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.698641] env[63345]: DEBUG oslo_vmware.api [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Task: {'id': task-1017367, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.703421] env[63345]: DEBUG oslo_vmware.api [None req-b8b27e1a-9936-4de6-bb9e-9dc49f8dbe6d tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Waiting for the task: (returnval){ [ 890.703421] env[63345]: value = "task-1017369" [ 890.703421] env[63345]: _type = "Task" [ 890.703421] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.714307] env[63345]: DEBUG oslo_vmware.api [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1017368, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.718682] env[63345]: DEBUG oslo_vmware.api [None req-b8b27e1a-9936-4de6-bb9e-9dc49f8dbe6d tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': task-1017369, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.944180] env[63345]: DEBUG nova.scheduler.client.report [None req-b9882f35-89e6-4dc7-8f48-84eee80f3558 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 890.948317] env[63345]: DEBUG nova.compute.manager [None req-fd2974a3-ed20-44e6-a8e4-d684154888e5 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 75fc8365-bf8d-489e-935f-a5169c6a7e62] Start destroying the instance on the hypervisor. {{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 890.948704] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-fd2974a3-ed20-44e6-a8e4-d684154888e5 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 75fc8365-bf8d-489e-935f-a5169c6a7e62] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 890.949707] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32e78f2e-5396-4567-806c-068d569b083c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.957929] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd2974a3-ed20-44e6-a8e4-d684154888e5 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 75fc8365-bf8d-489e-935f-a5169c6a7e62] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 890.958202] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e1399609-5471-43d8-867f-dfa6b93722e2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.965447] env[63345]: DEBUG oslo_vmware.api [None req-fd2974a3-ed20-44e6-a8e4-d684154888e5 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Waiting for the task: (returnval){ [ 890.965447] env[63345]: value = "task-1017370" [ 890.965447] env[63345]: _type = "Task" [ 890.965447] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.975766] env[63345]: DEBUG oslo_vmware.api [None req-fd2974a3-ed20-44e6-a8e4-d684154888e5 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017370, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.196575] env[63345]: DEBUG oslo_vmware.api [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Task: {'id': task-1017367, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.144862} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.197090] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6/ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 891.197090] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] [instance: ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 891.197312] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-26bcd94b-8499-46e9-8a92-a366dad4b9fa {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.206024] env[63345]: DEBUG oslo_vmware.api [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Waiting for the task: (returnval){ [ 891.206024] env[63345]: value = "task-1017371" [ 891.206024] env[63345]: _type = "Task" [ 891.206024] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.216776] env[63345]: DEBUG oslo_vmware.api [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1017368, 'name': ReconfigVM_Task, 'duration_secs': 0.864493} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.217546] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01] Reconfigured VM instance instance-00000052 to attach disk [datastore2] 0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01/0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 891.218563] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-006f5975-7ca8-492d-925c-ba80a8bcc72b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.223866] env[63345]: DEBUG oslo_vmware.api [None req-b8b27e1a-9936-4de6-bb9e-9dc49f8dbe6d tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': task-1017369, 'name': PowerOffVM_Task, 'duration_secs': 0.253568} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.227438] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8b27e1a-9936-4de6-bb9e-9dc49f8dbe6d tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: a415d4f2-abc7-4553-8442-312316e686b2] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 891.227879] env[63345]: DEBUG oslo_vmware.api [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Task: {'id': task-1017371, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.228682] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8902b04d-5081-43f8-9c92-95a84f2f11ab {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.234409] env[63345]: DEBUG oslo_vmware.api [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Waiting for the task: (returnval){ [ 891.234409] env[63345]: value = "task-1017372" [ 891.234409] env[63345]: _type = "Task" [ 891.234409] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.254656] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8537790b-af54-4a3c-9013-d32766efb980 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.263803] env[63345]: DEBUG oslo_vmware.api [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1017372, 'name': Rename_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.297050] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8b27e1a-9936-4de6-bb9e-9dc49f8dbe6d tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: a415d4f2-abc7-4553-8442-312316e686b2] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 891.297416] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4fa66fb4-af6f-4e62-9f23-b9bd1d5081c4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.305982] env[63345]: DEBUG oslo_vmware.api [None req-b8b27e1a-9936-4de6-bb9e-9dc49f8dbe6d tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Waiting for the task: (returnval){ [ 891.305982] env[63345]: value = "task-1017373" [ 891.305982] env[63345]: _type = "Task" [ 891.305982] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.320153] env[63345]: DEBUG oslo_vmware.api [None req-b8b27e1a-9936-4de6-bb9e-9dc49f8dbe6d tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': task-1017373, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.475802] env[63345]: DEBUG oslo_vmware.api [None req-fd2974a3-ed20-44e6-a8e4-d684154888e5 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017370, 'name': PowerOffVM_Task, 'duration_secs': 0.345484} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.476956] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd2974a3-ed20-44e6-a8e4-d684154888e5 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 75fc8365-bf8d-489e-935f-a5169c6a7e62] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 891.477198] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-fd2974a3-ed20-44e6-a8e4-d684154888e5 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 75fc8365-bf8d-489e-935f-a5169c6a7e62] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 891.477479] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c1c7a467-aa7e-4e3d-8f98-759baf904714 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.558695] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-fd2974a3-ed20-44e6-a8e4-d684154888e5 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 75fc8365-bf8d-489e-935f-a5169c6a7e62] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 891.558927] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-fd2974a3-ed20-44e6-a8e4-d684154888e5 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 75fc8365-bf8d-489e-935f-a5169c6a7e62] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 891.559124] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd2974a3-ed20-44e6-a8e4-d684154888e5 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Deleting the datastore file [datastore2] 75fc8365-bf8d-489e-935f-a5169c6a7e62 {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 891.559826] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0a9daf39-1361-4868-9b21-8f390d9955a9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.567179] env[63345]: DEBUG oslo_vmware.api [None req-fd2974a3-ed20-44e6-a8e4-d684154888e5 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Waiting for the task: (returnval){ [ 891.567179] env[63345]: value = "task-1017375" [ 891.567179] env[63345]: _type = "Task" [ 891.567179] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.575840] env[63345]: DEBUG oslo_vmware.api [None req-fd2974a3-ed20-44e6-a8e4-d684154888e5 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017375, 'name': DeleteDatastoreFile_Task} progress is 0%. 
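The task-1017370 and task-1017375 records above trace the driver's teardown path for instance 75fc8365-bf8d-489e-935f-a5169c6a7e62: power the VM off, unregister it, then delete its directory from the datastore via FileManager.DeleteDatastoreFile_Task. A hedged sketch of that sequence follows; session, vm_ref, dc_ref and the datastore path are assumed caller-provided inputs, and the exception handling is deliberately simplified compared to Nova's.

    # Hedged sketch of the power-off / unregister / delete-files teardown
    # traced above. `session`, `vm_ref`, `dc_ref` and `ds_path` are assumed
    # to be provided by the caller; they are not values from this log.
    from oslo_vmware import exceptions as vexc

    def destroy_vm(session, vm_ref, dc_ref, ds_path):
        # Power off first; an already-off VM raises a Vim fault, which a
        # real driver tolerates (cf. the "VM already powered off" record).
        try:
            off_task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
            session.wait_for_task(off_task)
        except vexc.VimException:
            pass  # simplification: Nova narrows this to the power-state fault

        # UnregisterVM is synchronous; no task object is returned or polled.
        session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

        # Delete the instance directory contents, e.g. "[datastore2] <uuid>".
        file_mgr = session.vim.service_content.fileManager
        del_task = session.invoke_api(
            session.vim, 'DeleteDatastoreFile_Task', file_mgr,
            name=ds_path, datacenter=dc_ref)
        session.wait_for_task(del_task)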
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.722189] env[63345]: DEBUG oslo_vmware.api [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Task: {'id': task-1017371, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071539} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.722474] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] [instance: ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 891.723352] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-153d72fb-12a3-4ec4-ad9b-47f98f3aebb3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.743831] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] [instance: ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6] Reconfiguring VM instance instance-00000050 to attach disk [datastore2] ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6/ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 891.744498] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4758177b-4078-4ecb-a9ae-09bdd48e0bab {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.773341] env[63345]: DEBUG oslo_vmware.api [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1017372, 'name': Rename_Task, 'duration_secs': 0.217483} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.774626] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 891.775196] env[63345]: DEBUG oslo_vmware.api [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Waiting for the task: (returnval){ [ 891.775196] env[63345]: value = "task-1017376" [ 891.775196] env[63345]: _type = "Task" [ 891.775196] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.776970] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bf97acd6-9761-4dfa-9d86-c835498cf900 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.787976] env[63345]: DEBUG oslo_vmware.api [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Task: {'id': task-1017376, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.789360] env[63345]: DEBUG oslo_vmware.api [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Waiting for the task: (returnval){ [ 891.789360] env[63345]: value = "task-1017377" [ 891.789360] env[63345]: _type = "Task" [ 891.789360] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.797678] env[63345]: DEBUG oslo_vmware.api [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1017377, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.818159] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8b27e1a-9936-4de6-bb9e-9dc49f8dbe6d tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: a415d4f2-abc7-4553-8442-312316e686b2] VM already powered off {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 891.818465] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-b8b27e1a-9936-4de6-bb9e-9dc49f8dbe6d tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: a415d4f2-abc7-4553-8442-312316e686b2] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 891.818827] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b8b27e1a-9936-4de6-bb9e-9dc49f8dbe6d tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 891.819041] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b8b27e1a-9936-4de6-bb9e-9dc49f8dbe6d tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 891.819435] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8b27e1a-9936-4de6-bb9e-9dc49f8dbe6d tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 891.819690] env[63345]: DEBUG oslo_vmware.service [-] 
Invoking FileManager.MakeDirectory with opID=oslo.vmware-8f8594fc-a39e-4165-bffb-3abfbda7d3d6 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.845968] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8b27e1a-9936-4de6-bb9e-9dc49f8dbe6d tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 891.846281] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-b8b27e1a-9936-4de6-bb9e-9dc49f8dbe6d tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 891.847592] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ec475ccc-60ed-40fd-bc09-47e7bb1021c3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.855404] env[63345]: DEBUG oslo_vmware.api [None req-b8b27e1a-9936-4de6-bb9e-9dc49f8dbe6d tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Waiting for the task: (returnval){ [ 891.855404] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52b9b9db-aebc-c273-b6b0-f1cf8d11c4a8" [ 891.855404] env[63345]: _type = "Task" [ 891.855404] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.865083] env[63345]: DEBUG oslo_vmware.api [None req-b8b27e1a-9936-4de6-bb9e-9dc49f8dbe6d tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52b9b9db-aebc-c273-b6b0-f1cf8d11c4a8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.957021] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b9882f35-89e6-4dc7-8f48-84eee80f3558 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.452s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 891.959874] env[63345]: DEBUG oslo_concurrency.lockutils [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.813s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 891.967370] env[63345]: INFO nova.compute.claims [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 27e2cb12-d251-434a-b79e-6fbda80d3637] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 892.081021] env[63345]: DEBUG oslo_vmware.api [None req-fd2974a3-ed20-44e6-a8e4-d684154888e5 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017375, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.380269} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.081021] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd2974a3-ed20-44e6-a8e4-d684154888e5 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 892.081021] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-fd2974a3-ed20-44e6-a8e4-d684154888e5 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 75fc8365-bf8d-489e-935f-a5169c6a7e62] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 892.081021] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-fd2974a3-ed20-44e6-a8e4-d684154888e5 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 75fc8365-bf8d-489e-935f-a5169c6a7e62] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 892.081021] env[63345]: INFO nova.compute.manager [None req-fd2974a3-ed20-44e6-a8e4-d684154888e5 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 75fc8365-bf8d-489e-935f-a5169c6a7e62] Took 1.13 seconds to destroy the instance on the hypervisor. [ 892.081021] env[63345]: DEBUG oslo.service.loopingcall [None req-fd2974a3-ed20-44e6-a8e4-d684154888e5 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
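The "Acquiring/Acquired/Releasing lock" and "acquired by ... waited Ns / released ... held Ns" records threaded through this section come from oslo.concurrency's lockutils: the waited/held pairs are emitted by the synchronized decorator's inner wrapper (lockutils.py:402/407/421), while the plain acquire/release pairs around the refresh_cache and image-cache locks come from the lock() context manager (lockutils.py:310/313/331). A small illustrative sketch, with example lock names rather than Nova's own code:

    # Illustrative use of oslo.concurrency locking; lock names are examples.
    from oslo_concurrency import lockutils

    # Decorator form: serializes callers sharing the lock name and logs the
    # "acquired by ... waited Ns" / "released ... held Ns" pairs seen above.
    @lockutils.synchronized('compute_resources')
    def update_usage(instance_uuid):
        print('updating usage for %s' % instance_uuid)

    # Context-manager form, as used around the refresh_cache-<uuid> and
    # devstack-image-cache_base locks ("Acquiring lock ... / Releasing lock ...").
    with lockutils.lock('refresh_cache-<instance-uuid>'):
        pass  # rebuild the instance network info cache while holding the lock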
{{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 892.081021] env[63345]: DEBUG nova.compute.manager [-] [instance: 75fc8365-bf8d-489e-935f-a5169c6a7e62] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 892.081021] env[63345]: DEBUG nova.network.neutron [-] [instance: 75fc8365-bf8d-489e-935f-a5169c6a7e62] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 892.290768] env[63345]: DEBUG oslo_vmware.api [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Task: {'id': task-1017376, 'name': ReconfigVM_Task, 'duration_secs': 0.390176} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.295827] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] [instance: ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6] Reconfigured VM instance instance-00000050 to attach disk [datastore2] ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6/ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 892.296969] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cbcc9d42-ce19-4b50-85eb-9c6f6500ac84 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.307036] env[63345]: DEBUG oslo_vmware.api [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1017377, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.309048] env[63345]: DEBUG oslo_vmware.api [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Waiting for the task: (returnval){ [ 892.309048] env[63345]: value = "task-1017378" [ 892.309048] env[63345]: _type = "Task" [ 892.309048] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.323583] env[63345]: DEBUG oslo_vmware.api [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Task: {'id': task-1017378, 'name': Rename_Task} progress is 6%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.366999] env[63345]: DEBUG oslo_vmware.api [None req-b8b27e1a-9936-4de6-bb9e-9dc49f8dbe6d tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52b9b9db-aebc-c273-b6b0-f1cf8d11c4a8, 'name': SearchDatastore_Task, 'duration_secs': 0.014753} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.367944] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e3206d8c-9a20-455e-ab84-1f0b3196a719 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.378757] env[63345]: DEBUG oslo_vmware.api [None req-b8b27e1a-9936-4de6-bb9e-9dc49f8dbe6d tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Waiting for the task: (returnval){ [ 892.378757] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52699fc0-c5ab-5320-28f7-e868c5b593fe" [ 892.378757] env[63345]: _type = "Task" [ 892.378757] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.388494] env[63345]: DEBUG oslo_vmware.api [None req-b8b27e1a-9936-4de6-bb9e-9dc49f8dbe6d tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52699fc0-c5ab-5320-28f7-e868c5b593fe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.478154] env[63345]: DEBUG nova.compute.manager [req-403d854e-d4c8-41b3-bbb5-73f62dc22d21 req-1fb3e409-3e3b-4dd1-9ad9-1e1c7566cb73 service nova] [instance: 75fc8365-bf8d-489e-935f-a5169c6a7e62] Received event network-vif-deleted-2bb8e7ea-091a-4a60-9a2b-e9b196790b55 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 892.478248] env[63345]: INFO nova.compute.manager [req-403d854e-d4c8-41b3-bbb5-73f62dc22d21 req-1fb3e409-3e3b-4dd1-9ad9-1e1c7566cb73 service nova] [instance: 75fc8365-bf8d-489e-935f-a5169c6a7e62] Neutron deleted interface 2bb8e7ea-091a-4a60-9a2b-e9b196790b55; detaching it from the instance and deleting it from the info cache [ 892.478427] env[63345]: DEBUG nova.network.neutron [req-403d854e-d4c8-41b3-bbb5-73f62dc22d21 req-1fb3e409-3e3b-4dd1-9ad9-1e1c7566cb73 service nova] [instance: 75fc8365-bf8d-489e-935f-a5169c6a7e62] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 892.546938] env[63345]: INFO nova.scheduler.client.report [None req-b9882f35-89e6-4dc7-8f48-84eee80f3558 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Deleted allocation for migration 606711cc-3f86-47b3-9a65-4efb46ca0c65 [ 892.801710] env[63345]: DEBUG oslo_vmware.api [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1017377, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.821168] env[63345]: DEBUG oslo_vmware.api [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Task: {'id': task-1017378, 'name': Rename_Task, 'duration_secs': 0.26487} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.821168] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] [instance: ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 892.821468] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-40bcf364-c63c-4bbb-8069-8e2a60489436 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.830181] env[63345]: DEBUG oslo_vmware.api [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Waiting for the task: (returnval){ [ 892.830181] env[63345]: value = "task-1017379" [ 892.830181] env[63345]: _type = "Task" [ 892.830181] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.839126] env[63345]: DEBUG oslo_vmware.api [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Task: {'id': task-1017379, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.893177] env[63345]: DEBUG oslo_vmware.api [None req-b8b27e1a-9936-4de6-bb9e-9dc49f8dbe6d tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52699fc0-c5ab-5320-28f7-e868c5b593fe, 'name': SearchDatastore_Task, 'duration_secs': 0.030987} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.893177] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b8b27e1a-9936-4de6-bb9e-9dc49f8dbe6d tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 892.893177] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8b27e1a-9936-4de6-bb9e-9dc49f8dbe6d tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] a415d4f2-abc7-4553-8442-312316e686b2/2ff49e1b-8f44-4332-bba9-777d55ff62c4-rescue.vmdk. 
{{(pid=63345) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 892.893177] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-55f5e3e1-e253-4e3a-a201-e521ce801d75 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.901018] env[63345]: DEBUG oslo_vmware.api [None req-b8b27e1a-9936-4de6-bb9e-9dc49f8dbe6d tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Waiting for the task: (returnval){ [ 892.901018] env[63345]: value = "task-1017380" [ 892.901018] env[63345]: _type = "Task" [ 892.901018] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.909597] env[63345]: DEBUG oslo_vmware.api [None req-b8b27e1a-9936-4de6-bb9e-9dc49f8dbe6d tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': task-1017380, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.944107] env[63345]: DEBUG nova.network.neutron [-] [instance: 75fc8365-bf8d-489e-935f-a5169c6a7e62] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 892.985784] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-86fe4729-4d76-44bc-9971-83696e0aec7c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.998297] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86fc81e1-8514-4382-9db4-6fbaeb7620b8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.036598] env[63345]: DEBUG nova.compute.manager [req-403d854e-d4c8-41b3-bbb5-73f62dc22d21 req-1fb3e409-3e3b-4dd1-9ad9-1e1c7566cb73 service nova] [instance: 75fc8365-bf8d-489e-935f-a5169c6a7e62] Detach interface failed, port_id=2bb8e7ea-091a-4a60-9a2b-e9b196790b55, reason: Instance 75fc8365-bf8d-489e-935f-a5169c6a7e62 could not be found. {{(pid=63345) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 893.059022] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b9882f35-89e6-4dc7-8f48-84eee80f3558 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Lock "11652422-9136-4453-b932-06695f9bc910" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 30.520s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 893.302896] env[63345]: DEBUG oslo_vmware.api [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1017377, 'name': PowerOnVM_Task, 'duration_secs': 1.241743} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.306275] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 893.306552] env[63345]: INFO nova.compute.manager [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01] Took 9.75 seconds to spawn the instance on the hypervisor. [ 893.306802] env[63345]: DEBUG nova.compute.manager [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 893.308736] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-260cd934-3d56-4142-a34e-03364e687f59 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.325282] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2e2c15a-c485-4347-beac-fc0af6efdbb5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.341088] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03f82cea-e779-49ad-a624-a637357b4ad4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.350029] env[63345]: DEBUG oslo_vmware.api [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Task: {'id': task-1017379, 'name': PowerOnVM_Task, 'duration_secs': 0.481307} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.350908] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] [instance: ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 893.351226] env[63345]: DEBUG nova.compute.manager [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] [instance: ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 893.352296] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cb7d3f5-4e51-4c19-aaf3-15fd954cb355 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.387156] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd0d191e-178d-4d46-8566-54ceb29b570e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.415280] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2481cdc1-b6bb-46da-bb20-945028f700fc {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.426664] env[63345]: DEBUG oslo_vmware.api [None req-b8b27e1a-9936-4de6-bb9e-9dc49f8dbe6d tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': task-1017380, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.435780] env[63345]: DEBUG nova.compute.provider_tree [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 893.446961] env[63345]: INFO nova.compute.manager [-] [instance: 75fc8365-bf8d-489e-935f-a5169c6a7e62] Took 1.37 seconds to deallocate network for instance. [ 893.832633] env[63345]: INFO nova.compute.manager [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01] Took 40.46 seconds to build instance. [ 893.903538] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 893.917931] env[63345]: DEBUG oslo_vmware.api [None req-b8b27e1a-9936-4de6-bb9e-9dc49f8dbe6d tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': task-1017380, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.556509} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.917931] env[63345]: INFO nova.virt.vmwareapi.ds_util [None req-b8b27e1a-9936-4de6-bb9e-9dc49f8dbe6d tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] a415d4f2-abc7-4553-8442-312316e686b2/2ff49e1b-8f44-4332-bba9-777d55ff62c4-rescue.vmdk. [ 893.920963] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d66677e9-998c-40cb-bb6c-1c291e0f34f3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.943094] env[63345]: DEBUG nova.scheduler.client.report [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 893.953760] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-b8b27e1a-9936-4de6-bb9e-9dc49f8dbe6d tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: a415d4f2-abc7-4553-8442-312316e686b2] Reconfiguring VM instance instance-00000051 to attach disk [datastore2] a415d4f2-abc7-4553-8442-312316e686b2/2ff49e1b-8f44-4332-bba9-777d55ff62c4-rescue.vmdk or device None with type thin {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 893.956023] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fd2974a3-ed20-44e6-a8e4-d684154888e5 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 893.956023] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6a1d4cea-be4c-4957-bd8d-944b92db2be7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.979887] env[63345]: DEBUG oslo_vmware.api [None req-b8b27e1a-9936-4de6-bb9e-9dc49f8dbe6d tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Waiting for the task: (returnval){ [ 893.979887] env[63345]: value = "task-1017381" [ 893.979887] env[63345]: _type = "Task" [ 893.979887] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.989093] env[63345]: DEBUG oslo_vmware.api [None req-b8b27e1a-9936-4de6-bb9e-9dc49f8dbe6d tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': task-1017381, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.335698] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1741673f-36b6-4ebb-ad3d-c06678998b49 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Lock "0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.972s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 894.363285] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0a2e023c-b063-48d9-b52b-50a4616fb8b5 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Acquiring lock "ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 894.363285] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0a2e023c-b063-48d9-b52b-50a4616fb8b5 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Lock "ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 894.363285] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0a2e023c-b063-48d9-b52b-50a4616fb8b5 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Acquiring lock "ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 894.363285] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0a2e023c-b063-48d9-b52b-50a4616fb8b5 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Lock "ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 894.363285] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0a2e023c-b063-48d9-b52b-50a4616fb8b5 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Lock "ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 894.364130] env[63345]: INFO nova.compute.manager [None req-0a2e023c-b063-48d9-b52b-50a4616fb8b5 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] [instance: ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6] Terminating instance [ 894.455824] env[63345]: DEBUG oslo_concurrency.lockutils [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.496s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 894.456424] 
env[63345]: DEBUG nova.compute.manager [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 27e2cb12-d251-434a-b79e-6fbda80d3637] Start building networks asynchronously for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 894.459033] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.468s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 894.460465] env[63345]: INFO nova.compute.claims [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] [instance: 017a06b3-cc1a-4822-a07f-ca881fd4254b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 894.493696] env[63345]: DEBUG oslo_vmware.api [None req-b8b27e1a-9936-4de6-bb9e-9dc49f8dbe6d tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': task-1017381, 'name': ReconfigVM_Task, 'duration_secs': 0.307027} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.494022] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-b8b27e1a-9936-4de6-bb9e-9dc49f8dbe6d tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: a415d4f2-abc7-4553-8442-312316e686b2] Reconfigured VM instance instance-00000051 to attach disk [datastore2] a415d4f2-abc7-4553-8442-312316e686b2/2ff49e1b-8f44-4332-bba9-777d55ff62c4-rescue.vmdk or device None with type thin {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 894.499887] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-515a5e51-64ca-4426-b0d6-a3994417ab80 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.528071] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fd31c475-0cce-4f3a-b907-83d414e4c54c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.547372] env[63345]: DEBUG oslo_vmware.api [None req-b8b27e1a-9936-4de6-bb9e-9dc49f8dbe6d tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Waiting for the task: (returnval){ [ 894.547372] env[63345]: value = "task-1017382" [ 894.547372] env[63345]: _type = "Task" [ 894.547372] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.557365] env[63345]: DEBUG oslo_vmware.api [None req-b8b27e1a-9936-4de6-bb9e-9dc49f8dbe6d tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': task-1017382, 'name': ReconfigVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.826269] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b903c3c1-305b-447b-8a72-5019cacea85b tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquiring lock "11652422-9136-4453-b932-06695f9bc910" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 894.826562] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b903c3c1-305b-447b-8a72-5019cacea85b tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Lock "11652422-9136-4453-b932-06695f9bc910" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 894.826771] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b903c3c1-305b-447b-8a72-5019cacea85b tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquiring lock "11652422-9136-4453-b932-06695f9bc910-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 894.826964] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b903c3c1-305b-447b-8a72-5019cacea85b tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Lock "11652422-9136-4453-b932-06695f9bc910-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 894.827659] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b903c3c1-305b-447b-8a72-5019cacea85b tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Lock "11652422-9136-4453-b932-06695f9bc910-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 894.829683] env[63345]: INFO nova.compute.manager [None req-b903c3c1-305b-447b-8a72-5019cacea85b tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 11652422-9136-4453-b932-06695f9bc910] Terminating instance [ 894.868261] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0a2e023c-b063-48d9-b52b-50a4616fb8b5 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Acquiring lock "refresh_cache-ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 894.868468] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0a2e023c-b063-48d9-b52b-50a4616fb8b5 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Acquired lock "refresh_cache-ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 894.868657] env[63345]: DEBUG nova.network.neutron [None req-0a2e023c-b063-48d9-b52b-50a4616fb8b5 
tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] [instance: ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 894.971505] env[63345]: DEBUG nova.compute.utils [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 894.972642] env[63345]: DEBUG nova.compute.manager [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 27e2cb12-d251-434a-b79e-6fbda80d3637] Allocating IP information in the background. {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 894.972828] env[63345]: DEBUG nova.network.neutron [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 27e2cb12-d251-434a-b79e-6fbda80d3637] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 895.018334] env[63345]: DEBUG nova.policy [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '620aa8aab5b7456e8d0feda8a3d9a225', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dc3b4aff33e540d79c796f98c315a05a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 895.058380] env[63345]: DEBUG oslo_vmware.api [None req-b8b27e1a-9936-4de6-bb9e-9dc49f8dbe6d tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': task-1017382, 'name': ReconfigVM_Task, 'duration_secs': 0.218474} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.058663] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8b27e1a-9936-4de6-bb9e-9dc49f8dbe6d tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: a415d4f2-abc7-4553-8442-312316e686b2] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 895.058915] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-110387d3-f933-46ca-bda3-7c89ead34ac5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.068085] env[63345]: DEBUG oslo_vmware.api [None req-b8b27e1a-9936-4de6-bb9e-9dc49f8dbe6d tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Waiting for the task: (returnval){ [ 895.068085] env[63345]: value = "task-1017383" [ 895.068085] env[63345]: _type = "Task" [ 895.068085] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.076182] env[63345]: DEBUG oslo_vmware.api [None req-b8b27e1a-9936-4de6-bb9e-9dc49f8dbe6d tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': task-1017383, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.324585] env[63345]: DEBUG nova.network.neutron [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 27e2cb12-d251-434a-b79e-6fbda80d3637] Successfully created port: a624b77d-1b1f-4acb-8c80-d8c6fb0360b9 {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 895.338039] env[63345]: DEBUG nova.compute.manager [None req-b903c3c1-305b-447b-8a72-5019cacea85b tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 11652422-9136-4453-b932-06695f9bc910] Start destroying the instance on the hypervisor. {{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 895.338039] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-b903c3c1-305b-447b-8a72-5019cacea85b tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 11652422-9136-4453-b932-06695f9bc910] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 895.338039] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-500fd07b-7d49-4a3e-b4c5-deff48180637 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.348172] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-b903c3c1-305b-447b-8a72-5019cacea85b tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 11652422-9136-4453-b932-06695f9bc910] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 895.348380] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-78b8688e-a870-4aa3-8a38-b9b156193716 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.359231] env[63345]: DEBUG oslo_vmware.api [None req-b903c3c1-305b-447b-8a72-5019cacea85b tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Waiting for the task: (returnval){ [ 895.359231] env[63345]: value = "task-1017384" [ 895.359231] env[63345]: _type = "Task" [ 895.359231] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.377015] env[63345]: DEBUG oslo_vmware.api [None req-b903c3c1-305b-447b-8a72-5019cacea85b tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017384, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.412970] env[63345]: DEBUG nova.network.neutron [None req-0a2e023c-b063-48d9-b52b-50a4616fb8b5 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] [instance: ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 895.479313] env[63345]: DEBUG nova.compute.manager [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 27e2cb12-d251-434a-b79e-6fbda80d3637] Start building block device mappings for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 895.536964] env[63345]: DEBUG nova.network.neutron [None req-0a2e023c-b063-48d9-b52b-50a4616fb8b5 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] [instance: ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 895.558256] env[63345]: DEBUG oslo_concurrency.lockutils [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquiring lock "0fe61754-458c-4c5c-bb2d-2677302e5fb9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 895.558682] env[63345]: DEBUG oslo_concurrency.lockutils [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Lock "0fe61754-458c-4c5c-bb2d-2677302e5fb9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 895.580456] env[63345]: DEBUG oslo_vmware.api [None req-b8b27e1a-9936-4de6-bb9e-9dc49f8dbe6d tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': task-1017383, 'name': PowerOnVM_Task, 'duration_secs': 0.4421} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.585255] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8b27e1a-9936-4de6-bb9e-9dc49f8dbe6d tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: a415d4f2-abc7-4553-8442-312316e686b2] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 895.588399] env[63345]: DEBUG nova.compute.manager [None req-b8b27e1a-9936-4de6-bb9e-9dc49f8dbe6d tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: a415d4f2-abc7-4553-8442-312316e686b2] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 895.589589] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2af19b55-c196-4532-8ac1-3d937f587d9b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.831224] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cabb535f-2223-40e7-bc53-1a69a110ba50 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.840471] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f35c0ccc-f5f3-4696-819e-4e3bbf26c49a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.877609] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5df6682-b95d-4e3e-b6b0-16c1ef0b91a0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.886119] env[63345]: DEBUG oslo_vmware.api [None req-b903c3c1-305b-447b-8a72-5019cacea85b tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017384, 'name': PowerOffVM_Task, 'duration_secs': 0.263276} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.888207] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-b903c3c1-305b-447b-8a72-5019cacea85b tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 11652422-9136-4453-b932-06695f9bc910] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 895.888397] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-b903c3c1-305b-447b-8a72-5019cacea85b tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 11652422-9136-4453-b932-06695f9bc910] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 895.888689] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-be183254-59d8-4088-bc28-5244041bba82 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.891545] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65bf0cbf-5707-482f-8b4d-832e6d90fa5d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.907646] env[63345]: DEBUG nova.compute.provider_tree [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 895.977966] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-b903c3c1-305b-447b-8a72-5019cacea85b tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 11652422-9136-4453-b932-06695f9bc910] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 895.978384] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-b903c3c1-305b-447b-8a72-5019cacea85b tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 11652422-9136-4453-b932-06695f9bc910] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 895.978697] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-b903c3c1-305b-447b-8a72-5019cacea85b tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Deleting the datastore file [datastore2] 11652422-9136-4453-b932-06695f9bc910 {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 895.979074] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f7bef131-deb6-49e4-8a3e-71620a675b6f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.991825] env[63345]: DEBUG oslo_vmware.api [None req-b903c3c1-305b-447b-8a72-5019cacea85b tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Waiting for the task: (returnval){ [ 895.991825] env[63345]: value = "task-1017386" [ 895.991825] env[63345]: _type = "Task" [ 895.991825] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.002544] env[63345]: DEBUG oslo_vmware.api [None req-b903c3c1-305b-447b-8a72-5019cacea85b tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017386, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.042034] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0a2e023c-b063-48d9-b52b-50a4616fb8b5 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Releasing lock "refresh_cache-ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 896.042570] env[63345]: DEBUG nova.compute.manager [None req-0a2e023c-b063-48d9-b52b-50a4616fb8b5 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] [instance: ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6] Start destroying the instance on the hypervisor. {{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 896.042830] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-0a2e023c-b063-48d9-b52b-50a4616fb8b5 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] [instance: ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 896.044168] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18d6dd66-939f-4fc8-9a19-c00f1457de82 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.053067] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a2e023c-b063-48d9-b52b-50a4616fb8b5 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] [instance: ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 896.053370] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-653f81cb-6895-455c-9171-cfcdad3e2f0a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.062699] env[63345]: DEBUG nova.compute.manager [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 896.069045] env[63345]: DEBUG oslo_vmware.api [None req-0a2e023c-b063-48d9-b52b-50a4616fb8b5 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Waiting for the task: (returnval){ [ 896.069045] env[63345]: value = "task-1017387" [ 896.069045] env[63345]: _type = "Task" [ 896.069045] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.076904] env[63345]: DEBUG oslo_vmware.api [None req-0a2e023c-b063-48d9-b52b-50a4616fb8b5 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Task: {'id': task-1017387, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.261073] env[63345]: DEBUG nova.compute.manager [None req-03e7fb6a-fab2-4661-8c0a-a06ba2f93a3f tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 896.262382] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4a8ea31-f418-452b-9f4e-e03183776d7e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.411211] env[63345]: DEBUG nova.scheduler.client.report [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 896.493580] env[63345]: DEBUG nova.compute.manager [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 27e2cb12-d251-434a-b79e-6fbda80d3637] Start spawning the instance on the hypervisor. {{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 896.513601] env[63345]: DEBUG oslo_vmware.api [None req-b903c3c1-305b-447b-8a72-5019cacea85b tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017386, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.187362} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.513866] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-b903c3c1-305b-447b-8a72-5019cacea85b tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 896.514070] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-b903c3c1-305b-447b-8a72-5019cacea85b tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 11652422-9136-4453-b932-06695f9bc910] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 896.514258] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-b903c3c1-305b-447b-8a72-5019cacea85b tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 11652422-9136-4453-b932-06695f9bc910] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 896.514441] env[63345]: INFO nova.compute.manager [None req-b903c3c1-305b-447b-8a72-5019cacea85b tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 11652422-9136-4453-b932-06695f9bc910] Took 1.18 seconds to destroy the instance on the hypervisor. [ 896.514689] env[63345]: DEBUG oslo.service.loopingcall [None req-b903c3c1-305b-447b-8a72-5019cacea85b tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 896.514884] env[63345]: DEBUG nova.compute.manager [-] [instance: 11652422-9136-4453-b932-06695f9bc910] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 896.514978] env[63345]: DEBUG nova.network.neutron [-] [instance: 11652422-9136-4453-b932-06695f9bc910] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 896.534503] env[63345]: DEBUG nova.virt.hardware [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 896.534864] env[63345]: DEBUG nova.virt.hardware [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 896.535043] env[63345]: DEBUG nova.virt.hardware [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 896.535242] env[63345]: DEBUG nova.virt.hardware [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 896.535392] env[63345]: DEBUG nova.virt.hardware [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 896.535547] env[63345]: DEBUG nova.virt.hardware [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 896.535764] env[63345]: DEBUG nova.virt.hardware [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Topology 
preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 896.536269] env[63345]: DEBUG nova.virt.hardware [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 896.536269] env[63345]: DEBUG nova.virt.hardware [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 896.536339] env[63345]: DEBUG nova.virt.hardware [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 896.536473] env[63345]: DEBUG nova.virt.hardware [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 896.537372] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bcacbd1-d867-4104-a111-f881606bd281 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.546141] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfe364b4-5225-440d-b008-d11829cd2108 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.585652] env[63345]: DEBUG oslo_vmware.api [None req-0a2e023c-b063-48d9-b52b-50a4616fb8b5 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Task: {'id': task-1017387, 'name': PowerOffVM_Task, 'duration_secs': 0.139023} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.585652] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a2e023c-b063-48d9-b52b-50a4616fb8b5 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] [instance: ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 896.585652] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-0a2e023c-b063-48d9-b52b-50a4616fb8b5 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] [instance: ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 896.585652] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9f143937-93e2-4689-add7-bfc6f484eefb {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.590690] env[63345]: DEBUG oslo_concurrency.lockutils [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 896.619042] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-0a2e023c-b063-48d9-b52b-50a4616fb8b5 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] [instance: ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 896.619382] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-0a2e023c-b063-48d9-b52b-50a4616fb8b5 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] [instance: ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 896.619775] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-0a2e023c-b063-48d9-b52b-50a4616fb8b5 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Deleting the datastore file [datastore2] ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6 {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 896.620084] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-929fe911-135e-4931-8d60-6bbd10168ec8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.637941] env[63345]: DEBUG oslo_vmware.api [None req-0a2e023c-b063-48d9-b52b-50a4616fb8b5 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Waiting for the task: (returnval){ [ 896.637941] env[63345]: value = "task-1017389" [ 896.637941] env[63345]: _type = "Task" [ 896.637941] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.649370] env[63345]: DEBUG oslo_vmware.api [None req-0a2e023c-b063-48d9-b52b-50a4616fb8b5 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Task: {'id': task-1017389, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.773652] env[63345]: INFO nova.compute.manager [None req-03e7fb6a-fab2-4661-8c0a-a06ba2f93a3f tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01] instance snapshotting [ 896.778845] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34505f3c-d90b-438d-acc8-4a629ab3f88d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.809026] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6adadd12-3eeb-4979-8d53-f06da2c7b99b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.917198] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.457s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 896.918171] env[63345]: DEBUG nova.compute.manager [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] [instance: 017a06b3-cc1a-4822-a07f-ca881fd4254b] Start building networks asynchronously for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 896.922144] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.138s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 896.923266] env[63345]: INFO nova.compute.claims [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: c84c8b9a-9164-4dd7-b094-dd09c15c6f21] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 897.003991] env[63345]: DEBUG nova.network.neutron [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 27e2cb12-d251-434a-b79e-6fbda80d3637] Successfully updated port: a624b77d-1b1f-4acb-8c80-d8c6fb0360b9 {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 897.033890] env[63345]: DEBUG nova.compute.manager [req-37c8f329-7aa3-4b66-850b-89121d2915e9 req-eb21777f-58e6-49d8-9f7f-ad64844ea6f4 service nova] [instance: 27e2cb12-d251-434a-b79e-6fbda80d3637] Received event network-vif-plugged-a624b77d-1b1f-4acb-8c80-d8c6fb0360b9 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 897.034172] env[63345]: DEBUG oslo_concurrency.lockutils [req-37c8f329-7aa3-4b66-850b-89121d2915e9 req-eb21777f-58e6-49d8-9f7f-ad64844ea6f4 service nova] Acquiring lock "27e2cb12-d251-434a-b79e-6fbda80d3637-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 897.034393] env[63345]: DEBUG oslo_concurrency.lockutils [req-37c8f329-7aa3-4b66-850b-89121d2915e9 req-eb21777f-58e6-49d8-9f7f-ad64844ea6f4 service nova] Lock "27e2cb12-d251-434a-b79e-6fbda80d3637-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 897.034570] env[63345]: DEBUG oslo_concurrency.lockutils [req-37c8f329-7aa3-4b66-850b-89121d2915e9 req-eb21777f-58e6-49d8-9f7f-ad64844ea6f4 service nova] Lock "27e2cb12-d251-434a-b79e-6fbda80d3637-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 897.034746] env[63345]: DEBUG nova.compute.manager [req-37c8f329-7aa3-4b66-850b-89121d2915e9 req-eb21777f-58e6-49d8-9f7f-ad64844ea6f4 service nova] [instance: 27e2cb12-d251-434a-b79e-6fbda80d3637] No waiting events found dispatching network-vif-plugged-a624b77d-1b1f-4acb-8c80-d8c6fb0360b9 {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 897.034916] env[63345]: WARNING nova.compute.manager [req-37c8f329-7aa3-4b66-850b-89121d2915e9 req-eb21777f-58e6-49d8-9f7f-ad64844ea6f4 service nova] [instance: 27e2cb12-d251-434a-b79e-6fbda80d3637] Received unexpected event network-vif-plugged-a624b77d-1b1f-4acb-8c80-d8c6fb0360b9 for instance with vm_state building and task_state spawning. [ 897.083135] env[63345]: DEBUG nova.compute.manager [req-465ee6a2-ddff-4e52-b0c5-40416b39b45f req-a71e419e-2057-4a81-8d5e-b346487a444d service nova] [instance: 11652422-9136-4453-b932-06695f9bc910] Received event network-vif-deleted-71b7616d-5472-4d3c-a8ca-6984d7c70c12 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 897.083135] env[63345]: INFO nova.compute.manager [req-465ee6a2-ddff-4e52-b0c5-40416b39b45f req-a71e419e-2057-4a81-8d5e-b346487a444d service nova] [instance: 11652422-9136-4453-b932-06695f9bc910] Neutron deleted interface 71b7616d-5472-4d3c-a8ca-6984d7c70c12; detaching it from the instance and deleting it from the info cache [ 897.083135] env[63345]: DEBUG nova.network.neutron [req-465ee6a2-ddff-4e52-b0c5-40416b39b45f req-a71e419e-2057-4a81-8d5e-b346487a444d service nova] [instance: 11652422-9136-4453-b932-06695f9bc910] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 897.148345] env[63345]: DEBUG oslo_vmware.api [None req-0a2e023c-b063-48d9-b52b-50a4616fb8b5 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Task: {'id': task-1017389, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.391399} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.148691] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-0a2e023c-b063-48d9-b52b-50a4616fb8b5 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 897.148845] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-0a2e023c-b063-48d9-b52b-50a4616fb8b5 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] [instance: ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 897.149137] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-0a2e023c-b063-48d9-b52b-50a4616fb8b5 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] [instance: ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 897.149210] env[63345]: INFO nova.compute.manager [None req-0a2e023c-b063-48d9-b52b-50a4616fb8b5 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] [instance: ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6] Took 1.11 seconds to destroy the instance on the hypervisor. [ 897.149461] env[63345]: DEBUG oslo.service.loopingcall [None req-0a2e023c-b063-48d9-b52b-50a4616fb8b5 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 897.149663] env[63345]: DEBUG nova.compute.manager [-] [instance: ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 897.149759] env[63345]: DEBUG nova.network.neutron [-] [instance: ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 897.166719] env[63345]: DEBUG nova.network.neutron [-] [instance: ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 897.323339] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-03e7fb6a-fab2-4661-8c0a-a06ba2f93a3f tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01] Creating Snapshot of the VM instance {{(pid=63345) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 897.325092] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-c3a915d3-d90a-447c-a5f1-863757b9a90b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.334127] env[63345]: DEBUG oslo_vmware.api [None req-03e7fb6a-fab2-4661-8c0a-a06ba2f93a3f tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Waiting for the task: (returnval){ [ 897.334127] env[63345]: value = "task-1017390" [ 897.334127] env[63345]: _type = "Task" [ 897.334127] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.345350] env[63345]: DEBUG oslo_vmware.api [None req-03e7fb6a-fab2-4661-8c0a-a06ba2f93a3f tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1017390, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.429231] env[63345]: DEBUG nova.compute.utils [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 897.432678] env[63345]: DEBUG nova.compute.manager [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] [instance: 017a06b3-cc1a-4822-a07f-ca881fd4254b] Allocating IP information in the background. {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 897.436019] env[63345]: DEBUG nova.network.neutron [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] [instance: 017a06b3-cc1a-4822-a07f-ca881fd4254b] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 897.481526] env[63345]: DEBUG nova.network.neutron [-] [instance: 11652422-9136-4453-b932-06695f9bc910] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 897.508229] env[63345]: DEBUG oslo_concurrency.lockutils [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Acquiring lock "refresh_cache-27e2cb12-d251-434a-b79e-6fbda80d3637" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 897.508229] env[63345]: DEBUG oslo_concurrency.lockutils [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Acquired lock "refresh_cache-27e2cb12-d251-434a-b79e-6fbda80d3637" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 897.508229] env[63345]: DEBUG nova.network.neutron [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 27e2cb12-d251-434a-b79e-6fbda80d3637] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 897.513713] env[63345]: DEBUG nova.policy [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '95a6bbb5cce745e0bb1a4b36d4760f52', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7e9c7157843047ac8203d4fc5261572a', 'project_domain_id': 'default', 'roles': ['member', 
'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 897.589827] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-018a0e64-4119-4e23-bfb8-75550db135de {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.598279] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a92e4351-e17f-4cc9-85c8-bbe531c81189 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.616340] env[63345]: INFO nova.compute.manager [None req-c7d439ee-9096-4035-8f9d-f03677031f4b tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: a415d4f2-abc7-4553-8442-312316e686b2] Unrescuing [ 897.619672] env[63345]: DEBUG oslo_concurrency.lockutils [None req-c7d439ee-9096-4035-8f9d-f03677031f4b tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Acquiring lock "refresh_cache-a415d4f2-abc7-4553-8442-312316e686b2" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 897.620319] env[63345]: DEBUG oslo_concurrency.lockutils [None req-c7d439ee-9096-4035-8f9d-f03677031f4b tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Acquired lock "refresh_cache-a415d4f2-abc7-4553-8442-312316e686b2" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 897.620672] env[63345]: DEBUG nova.network.neutron [None req-c7d439ee-9096-4035-8f9d-f03677031f4b tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: a415d4f2-abc7-4553-8442-312316e686b2] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 897.650691] env[63345]: DEBUG nova.compute.manager [req-465ee6a2-ddff-4e52-b0c5-40416b39b45f req-a71e419e-2057-4a81-8d5e-b346487a444d service nova] [instance: 11652422-9136-4453-b932-06695f9bc910] Detach interface failed, port_id=71b7616d-5472-4d3c-a8ca-6984d7c70c12, reason: Instance 11652422-9136-4453-b932-06695f9bc910 could not be found. {{(pid=63345) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 897.673345] env[63345]: DEBUG nova.network.neutron [-] [instance: ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 897.779165] env[63345]: DEBUG nova.network.neutron [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] [instance: 017a06b3-cc1a-4822-a07f-ca881fd4254b] Successfully created port: d861c19b-10d8-47c4-90d2-a823d9faa164 {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 897.847297] env[63345]: DEBUG oslo_vmware.api [None req-03e7fb6a-fab2-4661-8c0a-a06ba2f93a3f tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1017390, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.936523] env[63345]: DEBUG nova.compute.manager [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] [instance: 017a06b3-cc1a-4822-a07f-ca881fd4254b] Start building block device mappings for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 897.985451] env[63345]: INFO nova.compute.manager [-] [instance: 11652422-9136-4453-b932-06695f9bc910] Took 1.47 seconds to deallocate network for instance. [ 898.083585] env[63345]: DEBUG nova.network.neutron [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 27e2cb12-d251-434a-b79e-6fbda80d3637] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 898.179738] env[63345]: INFO nova.compute.manager [-] [instance: ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6] Took 1.03 seconds to deallocate network for instance. [ 898.291010] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e882e37b-0050-4dce-a49f-3c3607a33a22 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.299555] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-142cb686-cc88-460f-9584-2377375b0722 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.335673] env[63345]: DEBUG nova.network.neutron [None req-c7d439ee-9096-4035-8f9d-f03677031f4b tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: a415d4f2-abc7-4553-8442-312316e686b2] Updating instance_info_cache with network_info: [{"id": "f2c021c6-dbd4-40da-80c8-19678be6d78c", "address": "fa:16:3e:ae:e4:3b", "network": {"id": "dc725254-60a8-4edc-aab2-604dfb70677d", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1100061234-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "34efcd7d600f49698c6619be002d838f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b00fe87c-d828-442f-bd09-e9018c468557", "external-id": "nsx-vlan-transportzone-7", "segmentation_id": 7, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf2c021c6-db", "ovs_interfaceid": "f2c021c6-dbd4-40da-80c8-19678be6d78c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 898.336877] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d701624-9e4c-448a-9304-c79fb3e64562 {{(pid=63345) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.351939] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30b1fcf3-225b-4100-8fa8-b67dacd1743a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.356372] env[63345]: DEBUG oslo_vmware.api [None req-03e7fb6a-fab2-4661-8c0a-a06ba2f93a3f tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1017390, 'name': CreateSnapshot_Task, 'duration_secs': 0.580689} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.357413] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-03e7fb6a-fab2-4661-8c0a-a06ba2f93a3f tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01] Created Snapshot of the VM instance {{(pid=63345) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 898.358616] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f447f659-20fc-4e40-a134-b65925fb69cc {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.369572] env[63345]: DEBUG nova.compute.provider_tree [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 898.380367] env[63345]: DEBUG nova.network.neutron [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 27e2cb12-d251-434a-b79e-6fbda80d3637] Updating instance_info_cache with network_info: [{"id": "a624b77d-1b1f-4acb-8c80-d8c6fb0360b9", "address": "fa:16:3e:ef:51:c9", "network": {"id": "c9f406eb-96bc-4c63-8f76-474a8fcc4f7d", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1744519845-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc3b4aff33e540d79c796f98c315a05a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa624b77d-1b", "ovs_interfaceid": "a624b77d-1b1f-4acb-8c80-d8c6fb0360b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 898.492544] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b903c3c1-305b-447b-8a72-5019cacea85b tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquiring 
lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 898.688317] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0a2e023c-b063-48d9-b52b-50a4616fb8b5 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 898.840971] env[63345]: DEBUG oslo_concurrency.lockutils [None req-c7d439ee-9096-4035-8f9d-f03677031f4b tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Releasing lock "refresh_cache-a415d4f2-abc7-4553-8442-312316e686b2" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 898.841747] env[63345]: DEBUG nova.objects.instance [None req-c7d439ee-9096-4035-8f9d-f03677031f4b tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Lazy-loading 'flavor' on Instance uuid a415d4f2-abc7-4553-8442-312316e686b2 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 898.872120] env[63345]: DEBUG nova.scheduler.client.report [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 898.890123] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-03e7fb6a-fab2-4661-8c0a-a06ba2f93a3f tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01] Creating linked-clone VM from snapshot {{(pid=63345) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 898.891022] env[63345]: DEBUG oslo_concurrency.lockutils [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Releasing lock "refresh_cache-27e2cb12-d251-434a-b79e-6fbda80d3637" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 898.891022] env[63345]: DEBUG nova.compute.manager [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 27e2cb12-d251-434a-b79e-6fbda80d3637] Instance network_info: |[{"id": "a624b77d-1b1f-4acb-8c80-d8c6fb0360b9", "address": "fa:16:3e:ef:51:c9", "network": {"id": "c9f406eb-96bc-4c63-8f76-474a8fcc4f7d", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1744519845-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 
4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc3b4aff33e540d79c796f98c315a05a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa624b77d-1b", "ovs_interfaceid": "a624b77d-1b1f-4acb-8c80-d8c6fb0360b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 898.891795] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-10877c2f-1e51-4f45-8f51-2211dff19aab {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.895058] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 27e2cb12-d251-434a-b79e-6fbda80d3637] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ef:51:c9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4fb94adb-cc41-4c16-9830-a3205dbd2bf5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a624b77d-1b1f-4acb-8c80-d8c6fb0360b9', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 898.903533] env[63345]: DEBUG oslo.service.loopingcall [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 898.903983] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 27e2cb12-d251-434a-b79e-6fbda80d3637] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 898.904684] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a843965f-915d-4c84-b0a4-b85f2df98493 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.922223] env[63345]: DEBUG oslo_vmware.api [None req-03e7fb6a-fab2-4661-8c0a-a06ba2f93a3f tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Waiting for the task: (returnval){ [ 898.922223] env[63345]: value = "task-1017391" [ 898.922223] env[63345]: _type = "Task" [ 898.922223] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.934617] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 898.934617] env[63345]: value = "task-1017392" [ 898.934617] env[63345]: _type = "Task" [ 898.934617] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.936121] env[63345]: DEBUG oslo_vmware.api [None req-03e7fb6a-fab2-4661-8c0a-a06ba2f93a3f tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1017391, 'name': CloneVM_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.943755] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017392, 'name': CreateVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.949254] env[63345]: DEBUG nova.compute.manager [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] [instance: 017a06b3-cc1a-4822-a07f-ca881fd4254b] Start spawning the instance on the hypervisor. {{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 898.976335] env[63345]: DEBUG nova.virt.hardware [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 898.976605] env[63345]: DEBUG nova.virt.hardware [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 898.976775] env[63345]: DEBUG nova.virt.hardware [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 898.976972] env[63345]: DEBUG nova.virt.hardware [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 898.977603] env[63345]: DEBUG nova.virt.hardware [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 898.977603] env[63345]: DEBUG 
nova.virt.hardware [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 898.977603] env[63345]: DEBUG nova.virt.hardware [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 898.977808] env[63345]: DEBUG nova.virt.hardware [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 898.977853] env[63345]: DEBUG nova.virt.hardware [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 898.978032] env[63345]: DEBUG nova.virt.hardware [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 898.978228] env[63345]: DEBUG nova.virt.hardware [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 898.979126] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-468df9f1-1e6c-4d07-aac1-af5069519cfd {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.987767] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-303d0fe8-ea03-4b93-aa7e-ea30cb490ccc {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.064791] env[63345]: DEBUG nova.compute.manager [req-b26031c7-c747-44df-b8e7-4034d032feb2 req-9a96c1a4-ba77-4afe-a862-a242a2121d60 service nova] [instance: 27e2cb12-d251-434a-b79e-6fbda80d3637] Received event network-changed-a624b77d-1b1f-4acb-8c80-d8c6fb0360b9 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 899.065118] env[63345]: DEBUG nova.compute.manager [req-b26031c7-c747-44df-b8e7-4034d032feb2 req-9a96c1a4-ba77-4afe-a862-a242a2121d60 service nova] [instance: 27e2cb12-d251-434a-b79e-6fbda80d3637] Refreshing instance network info cache due to event network-changed-a624b77d-1b1f-4acb-8c80-d8c6fb0360b9. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 899.065441] env[63345]: DEBUG oslo_concurrency.lockutils [req-b26031c7-c747-44df-b8e7-4034d032feb2 req-9a96c1a4-ba77-4afe-a862-a242a2121d60 service nova] Acquiring lock "refresh_cache-27e2cb12-d251-434a-b79e-6fbda80d3637" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 899.065688] env[63345]: DEBUG oslo_concurrency.lockutils [req-b26031c7-c747-44df-b8e7-4034d032feb2 req-9a96c1a4-ba77-4afe-a862-a242a2121d60 service nova] Acquired lock "refresh_cache-27e2cb12-d251-434a-b79e-6fbda80d3637" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 899.065929] env[63345]: DEBUG nova.network.neutron [req-b26031c7-c747-44df-b8e7-4034d032feb2 req-9a96c1a4-ba77-4afe-a862-a242a2121d60 service nova] [instance: 27e2cb12-d251-434a-b79e-6fbda80d3637] Refreshing network info cache for port a624b77d-1b1f-4acb-8c80-d8c6fb0360b9 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 899.191249] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Acquiring lock "726332dd-8699-49a4-a9ea-b9cbfc159855" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 899.193309] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lock "726332dd-8699-49a4-a9ea-b9cbfc159855" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 899.349562] env[63345]: DEBUG nova.network.neutron [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] [instance: 017a06b3-cc1a-4822-a07f-ca881fd4254b] Successfully updated port: d861c19b-10d8-47c4-90d2-a823d9faa164 {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 899.352055] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a197b154-89f4-4a8c-86cc-e03487dc1886 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.374457] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7d439ee-9096-4035-8f9d-f03677031f4b tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: a415d4f2-abc7-4553-8442-312316e686b2] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 899.376248] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-eb757421-5912-401a-9ad3-a888816657e2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.379673] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] 
Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.458s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 899.379673] env[63345]: DEBUG nova.compute.manager [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: c84c8b9a-9164-4dd7-b094-dd09c15c6f21] Start building networks asynchronously for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 899.383185] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 25.286s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 899.383185] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 899.383185] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63345) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 899.383437] env[63345]: DEBUG oslo_concurrency.lockutils [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.626s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 899.383749] env[63345]: DEBUG nova.objects.instance [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Lazy-loading 'resources' on Instance uuid 070a834d-6478-4705-8df0-2a27c8780507 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 899.386482] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a32cee01-45fb-4cf4-a29d-8938936812e3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.392063] env[63345]: DEBUG oslo_vmware.api [None req-c7d439ee-9096-4035-8f9d-f03677031f4b tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Waiting for the task: (returnval){ [ 899.392063] env[63345]: value = "task-1017393" [ 899.392063] env[63345]: _type = "Task" [ 899.392063] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.401360] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c4348da-6e46-481e-929c-42e74272badf {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.414950] env[63345]: DEBUG oslo_vmware.api [None req-c7d439ee-9096-4035-8f9d-f03677031f4b tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': task-1017393, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.427807] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d6c7aed-207e-41c3-94d7-fc8ac41331a9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.437923] env[63345]: DEBUG oslo_vmware.api [None req-03e7fb6a-fab2-4661-8c0a-a06ba2f93a3f tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1017391, 'name': CloneVM_Task} progress is 94%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.446620] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07f35ac1-76c8-49a4-81fa-f7d5c7e295d0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.456262] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017392, 'name': CreateVM_Task, 'duration_secs': 0.394295} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.480412] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 27e2cb12-d251-434a-b79e-6fbda80d3637] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 899.481185] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179574MB free_disk=186GB free_vcpus=48 pci_devices=None {{(pid=63345) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 899.481350] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 899.482244] env[63345]: DEBUG oslo_concurrency.lockutils [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 899.482444] env[63345]: DEBUG oslo_concurrency.lockutils [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) 
lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 899.482821] env[63345]: DEBUG oslo_concurrency.lockutils [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 899.483116] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-01eb939f-c68d-4b86-b200-2f1f7f9a48c5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.489056] env[63345]: DEBUG oslo_vmware.api [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Waiting for the task: (returnval){ [ 899.489056] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52bc467d-6be3-6beb-b6d2-b823f92a661d" [ 899.489056] env[63345]: _type = "Task" [ 899.489056] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.499596] env[63345]: DEBUG oslo_vmware.api [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52bc467d-6be3-6beb-b6d2-b823f92a661d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.694295] env[63345]: DEBUG nova.compute.manager [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 899.790309] env[63345]: DEBUG nova.network.neutron [req-b26031c7-c747-44df-b8e7-4034d032feb2 req-9a96c1a4-ba77-4afe-a862-a242a2121d60 service nova] [instance: 27e2cb12-d251-434a-b79e-6fbda80d3637] Updated VIF entry in instance network info cache for port a624b77d-1b1f-4acb-8c80-d8c6fb0360b9. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 899.790704] env[63345]: DEBUG nova.network.neutron [req-b26031c7-c747-44df-b8e7-4034d032feb2 req-9a96c1a4-ba77-4afe-a862-a242a2121d60 service nova] [instance: 27e2cb12-d251-434a-b79e-6fbda80d3637] Updating instance_info_cache with network_info: [{"id": "a624b77d-1b1f-4acb-8c80-d8c6fb0360b9", "address": "fa:16:3e:ef:51:c9", "network": {"id": "c9f406eb-96bc-4c63-8f76-474a8fcc4f7d", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1744519845-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc3b4aff33e540d79c796f98c315a05a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa624b77d-1b", "ovs_interfaceid": "a624b77d-1b1f-4acb-8c80-d8c6fb0360b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 899.855624] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Acquiring lock "refresh_cache-017a06b3-cc1a-4822-a07f-ca881fd4254b" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 899.855814] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Acquired lock "refresh_cache-017a06b3-cc1a-4822-a07f-ca881fd4254b" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 899.856376] env[63345]: DEBUG nova.network.neutron [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] [instance: 017a06b3-cc1a-4822-a07f-ca881fd4254b] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 899.885714] env[63345]: DEBUG nova.compute.utils [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 899.885865] env[63345]: DEBUG nova.compute.manager [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: c84c8b9a-9164-4dd7-b094-dd09c15c6f21] Allocating IP information in the background. 
{{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 899.885938] env[63345]: DEBUG nova.network.neutron [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: c84c8b9a-9164-4dd7-b094-dd09c15c6f21] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 899.890707] env[63345]: DEBUG nova.objects.instance [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Lazy-loading 'numa_topology' on Instance uuid 070a834d-6478-4705-8df0-2a27c8780507 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 899.907972] env[63345]: DEBUG oslo_vmware.api [None req-c7d439ee-9096-4035-8f9d-f03677031f4b tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': task-1017393, 'name': PowerOffVM_Task, 'duration_secs': 0.216564} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.908331] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7d439ee-9096-4035-8f9d-f03677031f4b tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: a415d4f2-abc7-4553-8442-312316e686b2] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 899.914843] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-c7d439ee-9096-4035-8f9d-f03677031f4b tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: a415d4f2-abc7-4553-8442-312316e686b2] Reconfiguring VM instance instance-00000051 to detach disk 2001 {{(pid=63345) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 899.915429] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e3e4499f-b079-4261-a855-b27171d204a9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.932079] env[63345]: DEBUG nova.policy [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fce058d27d8e4da19af436b282b37f32', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '63d7b3facae6416989f763e610cf98f7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 899.943058] env[63345]: DEBUG oslo_vmware.api [None req-03e7fb6a-fab2-4661-8c0a-a06ba2f93a3f tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1017391, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.944389] env[63345]: DEBUG oslo_vmware.api [None req-c7d439ee-9096-4035-8f9d-f03677031f4b tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Waiting for the task: (returnval){ [ 899.944389] env[63345]: value = "task-1017394" [ 899.944389] env[63345]: _type = "Task" [ 899.944389] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.952223] env[63345]: DEBUG oslo_vmware.api [None req-c7d439ee-9096-4035-8f9d-f03677031f4b tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': task-1017394, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.005372] env[63345]: DEBUG oslo_vmware.api [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52bc467d-6be3-6beb-b6d2-b823f92a661d, 'name': SearchDatastore_Task, 'duration_secs': 0.011814} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.005509] env[63345]: DEBUG oslo_concurrency.lockutils [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 900.007974] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 27e2cb12-d251-434a-b79e-6fbda80d3637] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 900.007974] env[63345]: DEBUG oslo_concurrency.lockutils [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 900.007974] env[63345]: DEBUG oslo_concurrency.lockutils [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 900.007974] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 900.007974] 
env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5dcf6c40-19af-415a-b161-519436c770fb {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.015635] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 900.015890] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 900.016876] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-acdda409-f7c1-4477-910a-ecbbfc90c311 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.023034] env[63345]: DEBUG oslo_vmware.api [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Waiting for the task: (returnval){ [ 900.023034] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]529d80ec-57d7-a5dd-a493-a95d2c9e1718" [ 900.023034] env[63345]: _type = "Task" [ 900.023034] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.031929] env[63345]: DEBUG oslo_vmware.api [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]529d80ec-57d7-a5dd-a493-a95d2c9e1718, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.191700] env[63345]: DEBUG nova.network.neutron [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: c84c8b9a-9164-4dd7-b094-dd09c15c6f21] Successfully created port: 2ddc051b-7b20-4e01-8f07-b53ddf9efada {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 900.224657] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 900.294020] env[63345]: DEBUG oslo_concurrency.lockutils [req-b26031c7-c747-44df-b8e7-4034d032feb2 req-9a96c1a4-ba77-4afe-a862-a242a2121d60 service nova] Releasing lock "refresh_cache-27e2cb12-d251-434a-b79e-6fbda80d3637" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 900.386900] env[63345]: DEBUG nova.network.neutron [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] [instance: 017a06b3-cc1a-4822-a07f-ca881fd4254b] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 900.389107] env[63345]: DEBUG nova.compute.manager [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: c84c8b9a-9164-4dd7-b094-dd09c15c6f21] Start building block device mappings for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 900.393669] env[63345]: DEBUG nova.objects.base [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Object Instance<070a834d-6478-4705-8df0-2a27c8780507> lazy-loaded attributes: resources,numa_topology {{(pid=63345) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 900.444488] env[63345]: DEBUG oslo_vmware.api [None req-03e7fb6a-fab2-4661-8c0a-a06ba2f93a3f tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1017391, 'name': CloneVM_Task} progress is 95%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.456665] env[63345]: DEBUG oslo_vmware.api [None req-c7d439ee-9096-4035-8f9d-f03677031f4b tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': task-1017394, 'name': ReconfigVM_Task, 'duration_secs': 0.382595} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.456962] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-c7d439ee-9096-4035-8f9d-f03677031f4b tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: a415d4f2-abc7-4553-8442-312316e686b2] Reconfigured VM instance instance-00000051 to detach disk 2001 {{(pid=63345) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 900.457181] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7d439ee-9096-4035-8f9d-f03677031f4b tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: a415d4f2-abc7-4553-8442-312316e686b2] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 900.457438] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3420a484-cd0d-4268-a5a2-ba81f49c7c22 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.467166] env[63345]: DEBUG oslo_vmware.api [None req-c7d439ee-9096-4035-8f9d-f03677031f4b tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Waiting for the task: (returnval){ [ 900.467166] env[63345]: value = "task-1017395" [ 900.467166] env[63345]: _type = "Task" [ 900.467166] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.482422] env[63345]: DEBUG oslo_vmware.api [None req-c7d439ee-9096-4035-8f9d-f03677031f4b tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': task-1017395, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.535618] env[63345]: DEBUG oslo_vmware.api [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]529d80ec-57d7-a5dd-a493-a95d2c9e1718, 'name': SearchDatastore_Task, 'duration_secs': 0.010349} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.536924] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b8fd96bc-d78b-481f-905c-8879786d7f13 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.542656] env[63345]: DEBUG oslo_vmware.api [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Waiting for the task: (returnval){ [ 900.542656] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]522c9850-8236-3b22-e5df-0a430c746c85" [ 900.542656] env[63345]: _type = "Task" [ 900.542656] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.553658] env[63345]: DEBUG oslo_vmware.api [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]522c9850-8236-3b22-e5df-0a430c746c85, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.619979] env[63345]: DEBUG nova.network.neutron [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] [instance: 017a06b3-cc1a-4822-a07f-ca881fd4254b] Updating instance_info_cache with network_info: [{"id": "d861c19b-10d8-47c4-90d2-a823d9faa164", "address": "fa:16:3e:c9:2c:68", "network": {"id": "5a619899-0632-4b47-a853-63998d2913e7", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1507203599-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e9c7157843047ac8203d4fc5261572a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3739ba33-c119-432c-9aee-80a62864317d", "external-id": "nsx-vlan-transportzone-474", "segmentation_id": 474, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd861c19b-10", "ovs_interfaceid": "d861c19b-10d8-47c4-90d2-a823d9faa164", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 900.712759] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbfb9202-26f6-4601-8e5c-673d4f656e8d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.720545] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3475ee7f-46ef-4f21-85b7-244933aa05aa {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.753434] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07aa5473-d7d2-4dd9-9f4a-26a44885c7fd {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.761269] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b5cd692-d35c-46a7-9746-1630ba822c53 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.775689] env[63345]: DEBUG nova.compute.provider_tree [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Inventory has not changed in ProviderTree for provider: 
fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 900.945670] env[63345]: DEBUG oslo_vmware.api [None req-03e7fb6a-fab2-4661-8c0a-a06ba2f93a3f tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1017391, 'name': CloneVM_Task, 'duration_secs': 1.954121} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.945825] env[63345]: INFO nova.virt.vmwareapi.vmops [None req-03e7fb6a-fab2-4661-8c0a-a06ba2f93a3f tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01] Created linked-clone VM from snapshot [ 900.946563] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bea14484-a16d-41f6-8d18-f7aa477d3616 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.954588] env[63345]: DEBUG nova.virt.vmwareapi.images [None req-03e7fb6a-fab2-4661-8c0a-a06ba2f93a3f tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01] Uploading image 2085dc83-f9bf-4a82-892d-dba5cfcd7897 {{(pid=63345) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 900.977929] env[63345]: DEBUG oslo_vmware.api [None req-c7d439ee-9096-4035-8f9d-f03677031f4b tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': task-1017395, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.979542] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-03e7fb6a-fab2-4661-8c0a-a06ba2f93a3f tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01] Destroying the VM {{(pid=63345) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 900.979798] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-4266f15a-5b69-4f6b-babd-b3579f0a0a83 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.987696] env[63345]: DEBUG oslo_vmware.api [None req-03e7fb6a-fab2-4661-8c0a-a06ba2f93a3f tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Waiting for the task: (returnval){ [ 900.987696] env[63345]: value = "task-1017396" [ 900.987696] env[63345]: _type = "Task" [ 900.987696] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.996332] env[63345]: DEBUG oslo_vmware.api [None req-03e7fb6a-fab2-4661-8c0a-a06ba2f93a3f tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1017396, 'name': Destroy_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.054455] env[63345]: DEBUG oslo_vmware.api [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]522c9850-8236-3b22-e5df-0a430c746c85, 'name': SearchDatastore_Task, 'duration_secs': 0.018952} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.055053] env[63345]: DEBUG oslo_concurrency.lockutils [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 901.058018] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 27e2cb12-d251-434a-b79e-6fbda80d3637/27e2cb12-d251-434a-b79e-6fbda80d3637.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 901.058018] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-677821dc-b299-4b5d-93a6-47d480d0b0b1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.066022] env[63345]: DEBUG oslo_vmware.api [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Waiting for the task: (returnval){ [ 901.066022] env[63345]: value = "task-1017397" [ 901.066022] env[63345]: _type = "Task" [ 901.066022] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.076639] env[63345]: DEBUG oslo_vmware.api [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': task-1017397, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.126020] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Releasing lock "refresh_cache-017a06b3-cc1a-4822-a07f-ca881fd4254b" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 901.126020] env[63345]: DEBUG nova.compute.manager [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] [instance: 017a06b3-cc1a-4822-a07f-ca881fd4254b] Instance network_info: |[{"id": "d861c19b-10d8-47c4-90d2-a823d9faa164", "address": "fa:16:3e:c9:2c:68", "network": {"id": "5a619899-0632-4b47-a853-63998d2913e7", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1507203599-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e9c7157843047ac8203d4fc5261572a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3739ba33-c119-432c-9aee-80a62864317d", "external-id": "nsx-vlan-transportzone-474", "segmentation_id": 474, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd861c19b-10", "ovs_interfaceid": "d861c19b-10d8-47c4-90d2-a823d9faa164", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 901.126020] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] [instance: 017a06b3-cc1a-4822-a07f-ca881fd4254b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c9:2c:68', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3739ba33-c119-432c-9aee-80a62864317d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd861c19b-10d8-47c4-90d2-a823d9faa164', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 901.132324] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Creating folder: Project (7e9c7157843047ac8203d4fc5261572a). Parent ref: group-v225918. 
{{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 901.132866] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-90687bf0-98d7-4bab-82df-6b30151236fa {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.147526] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Created folder: Project (7e9c7157843047ac8203d4fc5261572a) in parent group-v225918. [ 901.147526] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Creating folder: Instances. Parent ref: group-v226093. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 901.147526] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-95a73d9e-cb75-4bed-acf1-b39b5596bd07 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.159797] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Created folder: Instances in parent group-v226093. [ 901.160257] env[63345]: DEBUG oslo.service.loopingcall [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 901.160594] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 017a06b3-cc1a-4822-a07f-ca881fd4254b] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 901.160938] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-90aa95ed-4dad-409a-8f90-3d0e0abe8273 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.183977] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 901.183977] env[63345]: value = "task-1017400" [ 901.183977] env[63345]: _type = "Task" [ 901.183977] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.192962] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017400, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.213335] env[63345]: DEBUG nova.compute.manager [req-d70e63e2-9764-4590-b65b-2e965d486896 req-1c7a934a-5ef5-4ee7-8e03-90aa25d493a0 service nova] [instance: 017a06b3-cc1a-4822-a07f-ca881fd4254b] Received event network-vif-plugged-d861c19b-10d8-47c4-90d2-a823d9faa164 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 901.213621] env[63345]: DEBUG oslo_concurrency.lockutils [req-d70e63e2-9764-4590-b65b-2e965d486896 req-1c7a934a-5ef5-4ee7-8e03-90aa25d493a0 service nova] Acquiring lock "017a06b3-cc1a-4822-a07f-ca881fd4254b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 901.213885] env[63345]: DEBUG oslo_concurrency.lockutils [req-d70e63e2-9764-4590-b65b-2e965d486896 req-1c7a934a-5ef5-4ee7-8e03-90aa25d493a0 service nova] Lock "017a06b3-cc1a-4822-a07f-ca881fd4254b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 901.214129] env[63345]: DEBUG oslo_concurrency.lockutils [req-d70e63e2-9764-4590-b65b-2e965d486896 req-1c7a934a-5ef5-4ee7-8e03-90aa25d493a0 service nova] Lock "017a06b3-cc1a-4822-a07f-ca881fd4254b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 901.214350] env[63345]: DEBUG nova.compute.manager [req-d70e63e2-9764-4590-b65b-2e965d486896 req-1c7a934a-5ef5-4ee7-8e03-90aa25d493a0 service nova] [instance: 017a06b3-cc1a-4822-a07f-ca881fd4254b] No waiting events found dispatching network-vif-plugged-d861c19b-10d8-47c4-90d2-a823d9faa164 {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 901.214580] env[63345]: WARNING nova.compute.manager [req-d70e63e2-9764-4590-b65b-2e965d486896 req-1c7a934a-5ef5-4ee7-8e03-90aa25d493a0 service nova] [instance: 017a06b3-cc1a-4822-a07f-ca881fd4254b] Received unexpected event network-vif-plugged-d861c19b-10d8-47c4-90d2-a823d9faa164 for instance with vm_state building and task_state spawning. [ 901.214796] env[63345]: DEBUG nova.compute.manager [req-d70e63e2-9764-4590-b65b-2e965d486896 req-1c7a934a-5ef5-4ee7-8e03-90aa25d493a0 service nova] [instance: 017a06b3-cc1a-4822-a07f-ca881fd4254b] Received event network-changed-d861c19b-10d8-47c4-90d2-a823d9faa164 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 901.215013] env[63345]: DEBUG nova.compute.manager [req-d70e63e2-9764-4590-b65b-2e965d486896 req-1c7a934a-5ef5-4ee7-8e03-90aa25d493a0 service nova] [instance: 017a06b3-cc1a-4822-a07f-ca881fd4254b] Refreshing instance network info cache due to event network-changed-d861c19b-10d8-47c4-90d2-a823d9faa164. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 901.215307] env[63345]: DEBUG oslo_concurrency.lockutils [req-d70e63e2-9764-4590-b65b-2e965d486896 req-1c7a934a-5ef5-4ee7-8e03-90aa25d493a0 service nova] Acquiring lock "refresh_cache-017a06b3-cc1a-4822-a07f-ca881fd4254b" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 901.215455] env[63345]: DEBUG oslo_concurrency.lockutils [req-d70e63e2-9764-4590-b65b-2e965d486896 req-1c7a934a-5ef5-4ee7-8e03-90aa25d493a0 service nova] Acquired lock "refresh_cache-017a06b3-cc1a-4822-a07f-ca881fd4254b" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 901.215688] env[63345]: DEBUG nova.network.neutron [req-d70e63e2-9764-4590-b65b-2e965d486896 req-1c7a934a-5ef5-4ee7-8e03-90aa25d493a0 service nova] [instance: 017a06b3-cc1a-4822-a07f-ca881fd4254b] Refreshing network info cache for port d861c19b-10d8-47c4-90d2-a823d9faa164 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 901.279324] env[63345]: DEBUG nova.scheduler.client.report [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 901.401093] env[63345]: DEBUG nova.compute.manager [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: c84c8b9a-9164-4dd7-b094-dd09c15c6f21] Start spawning the instance on the hypervisor. 
{{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 901.437199] env[63345]: DEBUG nova.virt.hardware [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 901.437546] env[63345]: DEBUG nova.virt.hardware [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 901.437790] env[63345]: DEBUG nova.virt.hardware [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 901.438056] env[63345]: DEBUG nova.virt.hardware [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 901.438260] env[63345]: DEBUG nova.virt.hardware [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 901.438518] env[63345]: DEBUG nova.virt.hardware [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 901.438795] env[63345]: DEBUG nova.virt.hardware [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 901.438982] env[63345]: DEBUG nova.virt.hardware [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 901.439273] env[63345]: DEBUG nova.virt.hardware [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] 
Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 901.439606] env[63345]: DEBUG nova.virt.hardware [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 901.439911] env[63345]: DEBUG nova.virt.hardware [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 901.441357] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0220370b-25c6-437e-ac2e-f5fe830d1320 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.453171] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d5cb983-ba7c-4498-ad82-70948d12b4d5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.477905] env[63345]: DEBUG oslo_vmware.api [None req-c7d439ee-9096-4035-8f9d-f03677031f4b tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': task-1017395, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.498422] env[63345]: DEBUG oslo_vmware.api [None req-03e7fb6a-fab2-4661-8c0a-a06ba2f93a3f tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1017396, 'name': Destroy_Task} progress is 33%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.576497] env[63345]: DEBUG oslo_vmware.api [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': task-1017397, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.694458] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017400, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.789221] env[63345]: DEBUG oslo_concurrency.lockutils [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.403s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 901.789960] env[63345]: DEBUG oslo_concurrency.lockutils [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 23.269s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 901.790407] env[63345]: DEBUG nova.objects.instance [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63345) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 901.817414] env[63345]: DEBUG nova.network.neutron [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: c84c8b9a-9164-4dd7-b094-dd09c15c6f21] Successfully updated port: 2ddc051b-7b20-4e01-8f07-b53ddf9efada {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 901.984020] env[63345]: DEBUG oslo_vmware.api [None req-c7d439ee-9096-4035-8f9d-f03677031f4b tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': task-1017395, 'name': PowerOnVM_Task, 'duration_secs': 1.274374} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.984020] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7d439ee-9096-4035-8f9d-f03677031f4b tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: a415d4f2-abc7-4553-8442-312316e686b2] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 901.984020] env[63345]: DEBUG nova.compute.manager [None req-c7d439ee-9096-4035-8f9d-f03677031f4b tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: a415d4f2-abc7-4553-8442-312316e686b2] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 901.984020] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c026877-1679-4e6a-b095-8d1083e7bd68 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.003803] env[63345]: DEBUG oslo_vmware.api [None req-03e7fb6a-fab2-4661-8c0a-a06ba2f93a3f tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1017396, 'name': Destroy_Task, 'duration_secs': 0.676422} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.004082] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-03e7fb6a-fab2-4661-8c0a-a06ba2f93a3f tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01] Destroyed the VM [ 902.004324] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-03e7fb6a-fab2-4661-8c0a-a06ba2f93a3f tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01] Deleting Snapshot of the VM instance {{(pid=63345) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 902.004568] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-2bdab50f-e3d7-42d3-b895-cf37d41f26da {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.011623] env[63345]: DEBUG oslo_vmware.api [None req-03e7fb6a-fab2-4661-8c0a-a06ba2f93a3f tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Waiting for the task: (returnval){ [ 902.011623] env[63345]: value = "task-1017401" [ 902.011623] env[63345]: _type = "Task" [ 902.011623] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.019613] env[63345]: DEBUG oslo_vmware.api [None req-03e7fb6a-fab2-4661-8c0a-a06ba2f93a3f tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1017401, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.072143] env[63345]: DEBUG nova.network.neutron [req-d70e63e2-9764-4590-b65b-2e965d486896 req-1c7a934a-5ef5-4ee7-8e03-90aa25d493a0 service nova] [instance: 017a06b3-cc1a-4822-a07f-ca881fd4254b] Updated VIF entry in instance network info cache for port d861c19b-10d8-47c4-90d2-a823d9faa164. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 902.072555] env[63345]: DEBUG nova.network.neutron [req-d70e63e2-9764-4590-b65b-2e965d486896 req-1c7a934a-5ef5-4ee7-8e03-90aa25d493a0 service nova] [instance: 017a06b3-cc1a-4822-a07f-ca881fd4254b] Updating instance_info_cache with network_info: [{"id": "d861c19b-10d8-47c4-90d2-a823d9faa164", "address": "fa:16:3e:c9:2c:68", "network": {"id": "5a619899-0632-4b47-a853-63998d2913e7", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1507203599-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e9c7157843047ac8203d4fc5261572a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3739ba33-c119-432c-9aee-80a62864317d", "external-id": "nsx-vlan-transportzone-474", "segmentation_id": 474, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd861c19b-10", "ovs_interfaceid": "d861c19b-10d8-47c4-90d2-a823d9faa164", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 902.077495] env[63345]: DEBUG oslo_vmware.api [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': task-1017397, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.539699} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.077986] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 27e2cb12-d251-434a-b79e-6fbda80d3637/27e2cb12-d251-434a-b79e-6fbda80d3637.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 902.078248] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 27e2cb12-d251-434a-b79e-6fbda80d3637] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 902.078509] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-90fcc7b5-acd1-4428-a771-99cac8711ffe {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.086659] env[63345]: DEBUG oslo_vmware.api [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Waiting for the task: (returnval){ [ 902.086659] env[63345]: value = "task-1017402" [ 902.086659] env[63345]: _type = "Task" [ 902.086659] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.095544] env[63345]: DEBUG oslo_vmware.api [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': task-1017402, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.200019] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017400, 'name': CreateVM_Task, 'duration_secs': 0.560558} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.200019] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 017a06b3-cc1a-4822-a07f-ca881fd4254b] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 902.200019] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 902.200019] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 902.200019] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 902.200019] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6f1f5027-2e59-4a67-90a2-63c49c2c44fb {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.205907] env[63345]: DEBUG oslo_vmware.api [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Waiting for the task: (returnval){ [ 902.205907] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]5258d055-56cb-b408-d39f-52c74a6d47bf" [ 902.205907] env[63345]: _type = "Task" [ 902.205907] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.214092] env[63345]: DEBUG oslo_vmware.api [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5258d055-56cb-b408-d39f-52c74a6d47bf, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.306642] env[63345]: DEBUG oslo_concurrency.lockutils [None req-28facb14-516c-4b3e-857f-461cf3ee194b tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Lock "070a834d-6478-4705-8df0-2a27c8780507" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 51.691s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 902.310108] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Lock "070a834d-6478-4705-8df0-2a27c8780507" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 23.515s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 902.310108] env[63345]: INFO nova.compute.manager [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Unshelving [ 902.324720] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquiring lock "refresh_cache-c84c8b9a-9164-4dd7-b094-dd09c15c6f21" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 902.324720] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquired lock "refresh_cache-c84c8b9a-9164-4dd7-b094-dd09c15c6f21" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 902.324720] env[63345]: DEBUG nova.network.neutron [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: c84c8b9a-9164-4dd7-b094-dd09c15c6f21] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 902.521493] env[63345]: DEBUG oslo_vmware.api [None req-03e7fb6a-fab2-4661-8c0a-a06ba2f93a3f tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1017401, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.578980] env[63345]: DEBUG oslo_concurrency.lockutils [req-d70e63e2-9764-4590-b65b-2e965d486896 req-1c7a934a-5ef5-4ee7-8e03-90aa25d493a0 service nova] Releasing lock "refresh_cache-017a06b3-cc1a-4822-a07f-ca881fd4254b" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 902.597190] env[63345]: DEBUG oslo_vmware.api [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': task-1017402, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068375} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.597580] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 27e2cb12-d251-434a-b79e-6fbda80d3637] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 902.598390] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f9d294f-f9ec-446c-895c-90f327a9788d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.621170] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 27e2cb12-d251-434a-b79e-6fbda80d3637] Reconfiguring VM instance instance-00000053 to attach disk [datastore2] 27e2cb12-d251-434a-b79e-6fbda80d3637/27e2cb12-d251-434a-b79e-6fbda80d3637.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 902.621505] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5962f201-449a-4bfc-a149-d0b3b132604b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.643381] env[63345]: DEBUG oslo_vmware.api [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Waiting for the task: (returnval){ [ 902.643381] env[63345]: value = "task-1017403" [ 902.643381] env[63345]: _type = "Task" [ 902.643381] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.653695] env[63345]: DEBUG oslo_vmware.api [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': task-1017403, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.717164] env[63345]: DEBUG oslo_vmware.api [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5258d055-56cb-b408-d39f-52c74a6d47bf, 'name': SearchDatastore_Task, 'duration_secs': 0.010525} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.717640] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 902.717759] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] [instance: 017a06b3-cc1a-4822-a07f-ca881fd4254b] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 902.717988] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 902.718159] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 902.719346] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 902.719346] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b7514eca-4e38-4718-8121-a0de0887b501 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.735500] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 902.735500] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 902.735927] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-156f514d-e62a-44ac-baaf-f0b1072a9fa4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.742088] env[63345]: DEBUG oslo_vmware.api [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Waiting for the task: (returnval){ [ 902.742088] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52a83c25-2ada-856b-9f24-8475e6368677" [ 902.742088] env[63345]: _type = "Task" [ 902.742088] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.750642] env[63345]: DEBUG oslo_vmware.api [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52a83c25-2ada-856b-9f24-8475e6368677, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.809643] env[63345]: DEBUG oslo_concurrency.lockutils [None req-989a4655-0fdc-4ee1-a2ad-09e7b05f405e tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.019s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 902.810654] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.127s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 902.812743] env[63345]: INFO nova.compute.claims [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 902.882124] env[63345]: DEBUG nova.network.neutron [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: c84c8b9a-9164-4dd7-b094-dd09c15c6f21] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 903.022885] env[63345]: DEBUG oslo_vmware.api [None req-03e7fb6a-fab2-4661-8c0a-a06ba2f93a3f tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1017401, 'name': RemoveSnapshot_Task} progress is 80%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.155929] env[63345]: DEBUG oslo_vmware.api [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': task-1017403, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.160330] env[63345]: DEBUG nova.network.neutron [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: c84c8b9a-9164-4dd7-b094-dd09c15c6f21] Updating instance_info_cache with network_info: [{"id": "2ddc051b-7b20-4e01-8f07-b53ddf9efada", "address": "fa:16:3e:b9:70:a6", "network": {"id": "f05df594-fc76-4e2d-b29b-6942fee8dc99", "bridge": "br-int", "label": "tempest-ServersTestJSON-241206779-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63d7b3facae6416989f763e610cf98f7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ddc051b-7b", "ovs_interfaceid": "2ddc051b-7b20-4e01-8f07-b53ddf9efada", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 903.253907] env[63345]: DEBUG nova.compute.manager [req-8cfcb62b-ba0b-406f-9777-f55684700f58 req-3bb115de-5578-4acc-ab37-55d3360e008c service nova] [instance: c84c8b9a-9164-4dd7-b094-dd09c15c6f21] Received event network-vif-plugged-2ddc051b-7b20-4e01-8f07-b53ddf9efada {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 903.253907] env[63345]: DEBUG oslo_concurrency.lockutils [req-8cfcb62b-ba0b-406f-9777-f55684700f58 req-3bb115de-5578-4acc-ab37-55d3360e008c service nova] Acquiring lock "c84c8b9a-9164-4dd7-b094-dd09c15c6f21-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 903.253907] env[63345]: DEBUG oslo_concurrency.lockutils [req-8cfcb62b-ba0b-406f-9777-f55684700f58 req-3bb115de-5578-4acc-ab37-55d3360e008c service nova] Lock "c84c8b9a-9164-4dd7-b094-dd09c15c6f21-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 903.253907] env[63345]: DEBUG oslo_concurrency.lockutils [req-8cfcb62b-ba0b-406f-9777-f55684700f58 req-3bb115de-5578-4acc-ab37-55d3360e008c service nova] Lock "c84c8b9a-9164-4dd7-b094-dd09c15c6f21-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 903.254111] env[63345]: DEBUG nova.compute.manager [req-8cfcb62b-ba0b-406f-9777-f55684700f58 req-3bb115de-5578-4acc-ab37-55d3360e008c service nova] [instance: c84c8b9a-9164-4dd7-b094-dd09c15c6f21] No waiting events found dispatching network-vif-plugged-2ddc051b-7b20-4e01-8f07-b53ddf9efada {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 903.254230] env[63345]: WARNING nova.compute.manager [req-8cfcb62b-ba0b-406f-9777-f55684700f58 req-3bb115de-5578-4acc-ab37-55d3360e008c service nova] [instance: c84c8b9a-9164-4dd7-b094-dd09c15c6f21] Received unexpected event network-vif-plugged-2ddc051b-7b20-4e01-8f07-b53ddf9efada for instance with vm_state building and task_state spawning. [ 903.255587] env[63345]: DEBUG nova.compute.manager [req-8cfcb62b-ba0b-406f-9777-f55684700f58 req-3bb115de-5578-4acc-ab37-55d3360e008c service nova] [instance: c84c8b9a-9164-4dd7-b094-dd09c15c6f21] Received event network-changed-2ddc051b-7b20-4e01-8f07-b53ddf9efada {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 903.255587] env[63345]: DEBUG nova.compute.manager [req-8cfcb62b-ba0b-406f-9777-f55684700f58 req-3bb115de-5578-4acc-ab37-55d3360e008c service nova] [instance: c84c8b9a-9164-4dd7-b094-dd09c15c6f21] Refreshing instance network info cache due to event network-changed-2ddc051b-7b20-4e01-8f07-b53ddf9efada. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 903.255587] env[63345]: DEBUG oslo_concurrency.lockutils [req-8cfcb62b-ba0b-406f-9777-f55684700f58 req-3bb115de-5578-4acc-ab37-55d3360e008c service nova] Acquiring lock "refresh_cache-c84c8b9a-9164-4dd7-b094-dd09c15c6f21" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 903.262814] env[63345]: DEBUG oslo_vmware.api [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52a83c25-2ada-856b-9f24-8475e6368677, 'name': SearchDatastore_Task, 'duration_secs': 0.049861} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.263816] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f3a8e3ca-2d32-4c1a-9e96-20a6a6b807d2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.269980] env[63345]: DEBUG oslo_vmware.api [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Waiting for the task: (returnval){ [ 903.269980] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]527a9521-ab71-0abb-2a77-872ee21f5acb" [ 903.269980] env[63345]: _type = "Task" [ 903.269980] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.279109] env[63345]: DEBUG oslo_vmware.api [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]527a9521-ab71-0abb-2a77-872ee21f5acb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.360321] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 903.522890] env[63345]: DEBUG oslo_vmware.api [None req-03e7fb6a-fab2-4661-8c0a-a06ba2f93a3f tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1017401, 'name': RemoveSnapshot_Task, 'duration_secs': 1.06045} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.523068] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-03e7fb6a-fab2-4661-8c0a-a06ba2f93a3f tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01] Deleted Snapshot of the VM instance {{(pid=63345) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 903.655771] env[63345]: DEBUG oslo_vmware.api [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': task-1017403, 'name': ReconfigVM_Task, 'duration_secs': 0.834302} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.656118] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 27e2cb12-d251-434a-b79e-6fbda80d3637] Reconfigured VM instance instance-00000053 to attach disk [datastore2] 27e2cb12-d251-434a-b79e-6fbda80d3637/27e2cb12-d251-434a-b79e-6fbda80d3637.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 903.656730] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fd0c2325-3ba4-444f-8ab0-3ec7636c2aa7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.664793] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Releasing lock "refresh_cache-c84c8b9a-9164-4dd7-b094-dd09c15c6f21" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 903.665131] env[63345]: DEBUG nova.compute.manager [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: c84c8b9a-9164-4dd7-b094-dd09c15c6f21] Instance network_info: |[{"id": "2ddc051b-7b20-4e01-8f07-b53ddf9efada", "address": "fa:16:3e:b9:70:a6", "network": {"id": "f05df594-fc76-4e2d-b29b-6942fee8dc99", "bridge": "br-int", "label": "tempest-ServersTestJSON-241206779-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63d7b3facae6416989f763e610cf98f7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ddc051b-7b", "ovs_interfaceid": "2ddc051b-7b20-4e01-8f07-b53ddf9efada", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 903.665623] env[63345]: DEBUG oslo_vmware.api [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Waiting for the task: (returnval){ [ 903.665623] env[63345]: value = "task-1017404" [ 903.665623] env[63345]: _type = "Task" [ 903.665623] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.666037] env[63345]: DEBUG oslo_concurrency.lockutils [req-8cfcb62b-ba0b-406f-9777-f55684700f58 req-3bb115de-5578-4acc-ab37-55d3360e008c service nova] Acquired lock "refresh_cache-c84c8b9a-9164-4dd7-b094-dd09c15c6f21" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 903.666184] env[63345]: DEBUG nova.network.neutron [req-8cfcb62b-ba0b-406f-9777-f55684700f58 req-3bb115de-5578-4acc-ab37-55d3360e008c service nova] [instance: c84c8b9a-9164-4dd7-b094-dd09c15c6f21] Refreshing network info cache for port 2ddc051b-7b20-4e01-8f07-b53ddf9efada {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 903.667625] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: c84c8b9a-9164-4dd7-b094-dd09c15c6f21] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b9:70:a6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7874ee7f-20c7-4bd8-a750-ed489e9acc65', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2ddc051b-7b20-4e01-8f07-b53ddf9efada', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 903.676454] env[63345]: DEBUG oslo.service.loopingcall [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 903.677740] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c84c8b9a-9164-4dd7-b094-dd09c15c6f21] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 903.681404] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-34c48c38-8fa4-4855-be46-9ff61684da41 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.705395] env[63345]: DEBUG oslo_vmware.api [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': task-1017404, 'name': Rename_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.707045] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 903.707045] env[63345]: value = "task-1017405" [ 903.707045] env[63345]: _type = "Task" [ 903.707045] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.715858] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017405, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.784452] env[63345]: DEBUG oslo_vmware.api [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]527a9521-ab71-0abb-2a77-872ee21f5acb, 'name': SearchDatastore_Task, 'duration_secs': 0.009974} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.784452] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 903.784452] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 017a06b3-cc1a-4822-a07f-ca881fd4254b/017a06b3-cc1a-4822-a07f-ca881fd4254b.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 903.784452] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a02c7143-551a-4c62-b996-528f363622c1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.795251] env[63345]: DEBUG oslo_vmware.api [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Waiting for the task: (returnval){ [ 903.795251] env[63345]: value = "task-1017406" [ 903.795251] env[63345]: _type = "Task" [ 903.795251] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.807182] env[63345]: DEBUG oslo_vmware.api [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Task: {'id': task-1017406, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.842821] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7f9b0d2b-afa1-4fff-b083-a7a9a4604fd0 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Acquiring lock "a415d4f2-abc7-4553-8442-312316e686b2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 903.843272] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7f9b0d2b-afa1-4fff-b083-a7a9a4604fd0 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Lock "a415d4f2-abc7-4553-8442-312316e686b2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 903.843533] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7f9b0d2b-afa1-4fff-b083-a7a9a4604fd0 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Acquiring lock "a415d4f2-abc7-4553-8442-312316e686b2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 903.843782] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7f9b0d2b-afa1-4fff-b083-a7a9a4604fd0 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Lock "a415d4f2-abc7-4553-8442-312316e686b2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 903.843979] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7f9b0d2b-afa1-4fff-b083-a7a9a4604fd0 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Lock "a415d4f2-abc7-4553-8442-312316e686b2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 903.850821] env[63345]: INFO nova.compute.manager [None req-7f9b0d2b-afa1-4fff-b083-a7a9a4604fd0 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: a415d4f2-abc7-4553-8442-312316e686b2] Terminating instance [ 903.860278] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b10f5478-4b37-47ee-91ea-5d5fd0f186ad tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Acquiring lock "5e20b33c-1481-4bd3-b269-29a70cc3150d" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 903.860628] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b10f5478-4b37-47ee-91ea-5d5fd0f186ad tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Lock "5e20b33c-1481-4bd3-b269-29a70cc3150d" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=63345) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 904.027166] env[63345]: WARNING nova.compute.manager [None req-03e7fb6a-fab2-4661-8c0a-a06ba2f93a3f tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01] Image not found during snapshot: nova.exception.ImageNotFound: Image 2085dc83-f9bf-4a82-892d-dba5cfcd7897 could not be found. [ 904.185625] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37d2f4a2-5eea-48c0-a5a2-bd619730d362 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.194888] env[63345]: DEBUG oslo_vmware.api [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': task-1017404, 'name': Rename_Task, 'duration_secs': 0.219667} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.197070] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 27e2cb12-d251-434a-b79e-6fbda80d3637] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 904.197414] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a4a58864-5358-4f08-9007-f554ee838cf1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.199841] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49c0b7a8-5f71-4aa8-9649-770da7d90297 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.237244] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43b4a5ff-3ef4-48a4-be73-f47a35798b5f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.239987] env[63345]: DEBUG oslo_vmware.api [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Waiting for the task: (returnval){ [ 904.239987] env[63345]: value = "task-1017407" [ 904.239987] env[63345]: _type = "Task" [ 904.239987] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.249883] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017405, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.254043] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11118800-cbe4-42d9-bc52-d78f376cfc67 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.264248] env[63345]: DEBUG oslo_vmware.api [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': task-1017407, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.275656] env[63345]: DEBUG nova.compute.provider_tree [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 904.304089] env[63345]: DEBUG oslo_vmware.api [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Task: {'id': task-1017406, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.360282] env[63345]: DEBUG nova.compute.manager [None req-7f9b0d2b-afa1-4fff-b083-a7a9a4604fd0 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: a415d4f2-abc7-4553-8442-312316e686b2] Start destroying the instance on the hypervisor. 
{{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 904.360525] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-7f9b0d2b-afa1-4fff-b083-a7a9a4604fd0 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: a415d4f2-abc7-4553-8442-312316e686b2] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 904.361688] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7eedf93-a1a3-417c-ab0c-e285f72b2946 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.365223] env[63345]: DEBUG nova.compute.utils [None req-b10f5478-4b37-47ee-91ea-5d5fd0f186ad tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 904.372315] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f9b0d2b-afa1-4fff-b083-a7a9a4604fd0 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: a415d4f2-abc7-4553-8442-312316e686b2] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 904.372671] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-af1c435d-6414-4df0-bbc1-a6bb10c1b137 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.384976] env[63345]: DEBUG oslo_vmware.api [None req-7f9b0d2b-afa1-4fff-b083-a7a9a4604fd0 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Waiting for the task: (returnval){ [ 904.384976] env[63345]: value = "task-1017408" [ 904.384976] env[63345]: _type = "Task" [ 904.384976] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.393867] env[63345]: DEBUG oslo_vmware.api [None req-7f9b0d2b-afa1-4fff-b083-a7a9a4604fd0 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': task-1017408, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.541051] env[63345]: DEBUG nova.network.neutron [req-8cfcb62b-ba0b-406f-9777-f55684700f58 req-3bb115de-5578-4acc-ab37-55d3360e008c service nova] [instance: c84c8b9a-9164-4dd7-b094-dd09c15c6f21] Updated VIF entry in instance network info cache for port 2ddc051b-7b20-4e01-8f07-b53ddf9efada. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 904.541684] env[63345]: DEBUG nova.network.neutron [req-8cfcb62b-ba0b-406f-9777-f55684700f58 req-3bb115de-5578-4acc-ab37-55d3360e008c service nova] [instance: c84c8b9a-9164-4dd7-b094-dd09c15c6f21] Updating instance_info_cache with network_info: [{"id": "2ddc051b-7b20-4e01-8f07-b53ddf9efada", "address": "fa:16:3e:b9:70:a6", "network": {"id": "f05df594-fc76-4e2d-b29b-6942fee8dc99", "bridge": "br-int", "label": "tempest-ServersTestJSON-241206779-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63d7b3facae6416989f763e610cf98f7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ddc051b-7b", "ovs_interfaceid": "2ddc051b-7b20-4e01-8f07-b53ddf9efada", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 904.743107] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017405, 'name': CreateVM_Task, 'duration_secs': 0.596785} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.746560] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c84c8b9a-9164-4dd7-b094-dd09c15c6f21] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 904.747271] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 904.747524] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 904.747852] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 904.748470] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-359ae837-cc05-43e1-a3d5-5b29d3488140 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.754192] env[63345]: 
DEBUG oslo_vmware.api [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': task-1017407, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.755519] env[63345]: DEBUG oslo_vmware.api [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Waiting for the task: (returnval){ [ 904.755519] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]521eb383-fd2c-4b0f-819f-e3fcd2da8bdc" [ 904.755519] env[63345]: _type = "Task" [ 904.755519] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.763627] env[63345]: DEBUG oslo_vmware.api [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]521eb383-fd2c-4b0f-819f-e3fcd2da8bdc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.778797] env[63345]: DEBUG nova.scheduler.client.report [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 904.805492] env[63345]: DEBUG oslo_vmware.api [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Task: {'id': task-1017406, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.868791] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b10f5478-4b37-47ee-91ea-5d5fd0f186ad tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Lock "5e20b33c-1481-4bd3-b269-29a70cc3150d" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.008s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 904.898074] env[63345]: DEBUG oslo_vmware.api [None req-7f9b0d2b-afa1-4fff-b083-a7a9a4604fd0 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': task-1017408, 'name': PowerOffVM_Task, 'duration_secs': 0.257113} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.898463] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f9b0d2b-afa1-4fff-b083-a7a9a4604fd0 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: a415d4f2-abc7-4553-8442-312316e686b2] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 904.898714] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-7f9b0d2b-afa1-4fff-b083-a7a9a4604fd0 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: a415d4f2-abc7-4553-8442-312316e686b2] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 904.899099] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6a3d051d-f3a6-4dd2-8212-fe2557758f43 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.988716] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-7f9b0d2b-afa1-4fff-b083-a7a9a4604fd0 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: a415d4f2-abc7-4553-8442-312316e686b2] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 904.988982] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-7f9b0d2b-afa1-4fff-b083-a7a9a4604fd0 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: a415d4f2-abc7-4553-8442-312316e686b2] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 904.989258] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f9b0d2b-afa1-4fff-b083-a7a9a4604fd0 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Deleting the datastore file [datastore2] a415d4f2-abc7-4553-8442-312316e686b2 {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 904.989542] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-112db926-9cef-45a0-849f-625ae5da7330 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.997731] env[63345]: DEBUG oslo_vmware.api [None req-7f9b0d2b-afa1-4fff-b083-a7a9a4604fd0 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Waiting for the task: (returnval){ [ 904.997731] env[63345]: value = "task-1017410" [ 904.997731] env[63345]: _type = "Task" [ 904.997731] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.005956] env[63345]: DEBUG oslo_vmware.api [None req-7f9b0d2b-afa1-4fff-b083-a7a9a4604fd0 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': task-1017410, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.044669] env[63345]: DEBUG oslo_concurrency.lockutils [req-8cfcb62b-ba0b-406f-9777-f55684700f58 req-3bb115de-5578-4acc-ab37-55d3360e008c service nova] Releasing lock "refresh_cache-c84c8b9a-9164-4dd7-b094-dd09c15c6f21" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 905.254677] env[63345]: DEBUG oslo_vmware.api [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': task-1017407, 'name': PowerOnVM_Task, 'duration_secs': 0.719753} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.254957] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 27e2cb12-d251-434a-b79e-6fbda80d3637] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 905.255188] env[63345]: INFO nova.compute.manager [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 27e2cb12-d251-434a-b79e-6fbda80d3637] Took 8.76 seconds to spawn the instance on the hypervisor. [ 905.255373] env[63345]: DEBUG nova.compute.manager [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 27e2cb12-d251-434a-b79e-6fbda80d3637] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 905.256163] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c111acd-0efe-4c03-acc5-d0ac0f6bf995 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.276437] env[63345]: DEBUG oslo_vmware.api [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]521eb383-fd2c-4b0f-819f-e3fcd2da8bdc, 'name': SearchDatastore_Task, 'duration_secs': 0.010312} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.277133] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 905.277484] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: c84c8b9a-9164-4dd7-b094-dd09c15c6f21] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 905.277836] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 905.278090] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 905.278369] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 905.278714] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-712058c5-c5b2-46e9-9902-2484669b3e0c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.284226] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.473s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 905.284741] env[63345]: DEBUG nova.compute.manager [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Start building networks asynchronously for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 905.288914] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f03dc436-4cdc-4f95-af05-907d03ccf506 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.067s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 905.289080] env[63345]: DEBUG nova.objects.instance [None req-f03dc436-4cdc-4f95-af05-907d03ccf506 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Lazy-loading 'resources' on Instance uuid 14198777-9091-4c69-8928-c83135acc7d2 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 905.290509] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 905.290777] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 905.291567] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-651e294b-445d-4489-bb7c-5f548d2bdbd8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.299633] env[63345]: DEBUG oslo_vmware.api [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Waiting for the task: (returnval){ [ 905.299633] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]5288de61-9b09-2699-e2ee-73ba949f938a" [ 905.299633] env[63345]: _type = "Task" [ 905.299633] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.306184] env[63345]: DEBUG oslo_vmware.api [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Task: {'id': task-1017406, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.312648] env[63345]: DEBUG oslo_vmware.api [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5288de61-9b09-2699-e2ee-73ba949f938a, 'name': SearchDatastore_Task, 'duration_secs': 0.009609} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.314371] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c563e29-0b8d-4a0d-b5d9-82d1aaefebe9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.319912] env[63345]: DEBUG oslo_vmware.api [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Waiting for the task: (returnval){ [ 905.319912] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52125c8d-875d-a6f0-1dd2-1ac8058604d1" [ 905.319912] env[63345]: _type = "Task" [ 905.319912] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.328087] env[63345]: DEBUG oslo_vmware.api [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52125c8d-875d-a6f0-1dd2-1ac8058604d1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.444939] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6284547f-525f-42d1-ac38-525740462381 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Acquiring lock "0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 905.445250] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6284547f-525f-42d1-ac38-525740462381 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Lock "0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 905.445464] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6284547f-525f-42d1-ac38-525740462381 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Acquiring lock "0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 905.445654] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6284547f-525f-42d1-ac38-525740462381 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Lock "0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 905.445837] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6284547f-525f-42d1-ac38-525740462381 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Lock "0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 905.448220] 
env[63345]: INFO nova.compute.manager [None req-6284547f-525f-42d1-ac38-525740462381 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01] Terminating instance [ 905.507707] env[63345]: DEBUG oslo_vmware.api [None req-7f9b0d2b-afa1-4fff-b083-a7a9a4604fd0 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': task-1017410, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.182884} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.507966] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f9b0d2b-afa1-4fff-b083-a7a9a4604fd0 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 905.508178] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-7f9b0d2b-afa1-4fff-b083-a7a9a4604fd0 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: a415d4f2-abc7-4553-8442-312316e686b2] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 905.508366] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-7f9b0d2b-afa1-4fff-b083-a7a9a4604fd0 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: a415d4f2-abc7-4553-8442-312316e686b2] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 905.508541] env[63345]: INFO nova.compute.manager [None req-7f9b0d2b-afa1-4fff-b083-a7a9a4604fd0 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: a415d4f2-abc7-4553-8442-312316e686b2] Took 1.15 seconds to destroy the instance on the hypervisor. [ 905.508779] env[63345]: DEBUG oslo.service.loopingcall [None req-7f9b0d2b-afa1-4fff-b083-a7a9a4604fd0 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 905.508969] env[63345]: DEBUG nova.compute.manager [-] [instance: a415d4f2-abc7-4553-8442-312316e686b2] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 905.509079] env[63345]: DEBUG nova.network.neutron [-] [instance: a415d4f2-abc7-4553-8442-312316e686b2] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 905.786563] env[63345]: INFO nova.compute.manager [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 27e2cb12-d251-434a-b79e-6fbda80d3637] Took 38.67 seconds to build instance. 
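The records above show the compute driver repeatedly polling vCenter tasks (SearchDatastore_Task, CopyVirtualDisk_Task, DeleteDatastoreFile_Task) through wait_for_task/_poll_task, logging "progress is N%" until the task reports "completed successfully". The following is a minimal, self-contained sketch of that poll-until-done pattern only; fetch_task_state is a hypothetical stand-in for the vCenter task query, and this is not oslo.vmware's implementation.

# Illustrative sketch of the poll-until-complete pattern visible in the
# wait_for_task/_poll_task records above. `fetch_task_state` is a
# hypothetical callable standing in for the vCenter task-info query.
import time
from dataclasses import dataclass


@dataclass
class TaskInfo:
    state: str              # "running", "success", or "error"
    progress: int           # 0-100, as logged ("progress is 0%" ... "100%")
    error: str | None = None


def wait_for_task(fetch_task_state, poll_interval=0.5, timeout=300.0):
    """Poll a task until it succeeds, fails, or times out."""
    started = time.monotonic()
    while True:
        info = fetch_task_state()
        if info.state == "success":
            duration = time.monotonic() - started
            print(f"completed successfully in {duration:.6f}s")
            return info
        if info.state == "error":
            raise RuntimeError(f"task failed: {info.error}")
        print(f"progress is {info.progress}%")
        if time.monotonic() - started > timeout:
            raise TimeoutError("task did not complete in time")
        time.sleep(poll_interval)


if __name__ == "__main__":
    # Fake task that finishes on the third poll.
    states = iter([TaskInfo("running", 0), TaskInfo("running", 89),
                   TaskInfo("success", 100)])
    wait_for_task(lambda: next(states), poll_interval=0.01)

The real driver interleaves many such polls (the alternating task-1017411/task-1017412 progress records above), so each poll is cheap and the loop yields between checks rather than blocking on a single synchronous call.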
[ 905.790370] env[63345]: DEBUG nova.compute.utils [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 905.790896] env[63345]: DEBUG nova.compute.manager [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Allocating IP information in the background. {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 905.791111] env[63345]: DEBUG nova.network.neutron [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 905.808681] env[63345]: DEBUG oslo_vmware.api [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Task: {'id': task-1017406, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.633597} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.808681] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 017a06b3-cc1a-4822-a07f-ca881fd4254b/017a06b3-cc1a-4822-a07f-ca881fd4254b.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 905.808838] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] [instance: 017a06b3-cc1a-4822-a07f-ca881fd4254b] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 905.808956] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e78c1d82-4042-4dc3-ae26-2b446b3158c3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.817255] env[63345]: DEBUG oslo_vmware.api [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Waiting for the task: (returnval){ [ 905.817255] env[63345]: value = "task-1017411" [ 905.817255] env[63345]: _type = "Task" [ 905.817255] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.830949] env[63345]: DEBUG oslo_vmware.api [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52125c8d-875d-a6f0-1dd2-1ac8058604d1, 'name': SearchDatastore_Task, 'duration_secs': 0.009131} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.833776] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 905.834027] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] c84c8b9a-9164-4dd7-b094-dd09c15c6f21/c84c8b9a-9164-4dd7-b094-dd09c15c6f21.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 905.834315] env[63345]: DEBUG oslo_vmware.api [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Task: {'id': task-1017411, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.837368] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0d67e8b4-de2a-4e08-8ad4-a4e4118b836c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.843236] env[63345]: DEBUG nova.policy [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dd618fef89a843209784ca9e925d18eb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cb91ecf5d00e48dea9baf2122ac4fed7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 905.850947] env[63345]: DEBUG oslo_vmware.api [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Waiting for the task: (returnval){ [ 905.850947] env[63345]: value = "task-1017412" [ 905.850947] env[63345]: _type = "Task" [ 905.850947] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.867114] env[63345]: DEBUG oslo_vmware.api [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017412, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.951734] env[63345]: DEBUG nova.compute.manager [None req-6284547f-525f-42d1-ac38-525740462381 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01] Start destroying the instance on the hypervisor. {{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 905.951996] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-6284547f-525f-42d1-ac38-525740462381 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 905.952901] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d4515c5-5578-4678-952f-1fa502571711 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.964023] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-6284547f-525f-42d1-ac38-525740462381 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 905.966440] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-73f055ce-f3e7-4a2e-9c21-06f27c0e4070 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.976117] env[63345]: DEBUG oslo_vmware.api [None req-6284547f-525f-42d1-ac38-525740462381 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Waiting for the task: (returnval){ [ 905.976117] env[63345]: value = "task-1017413" [ 905.976117] env[63345]: _type = "Task" [ 905.976117] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.986904] env[63345]: DEBUG oslo_vmware.api [None req-6284547f-525f-42d1-ac38-525740462381 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1017413, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.126063] env[63345]: DEBUG nova.compute.manager [req-593c805d-1c60-4773-88ab-2f34456482a9 req-14238258-044f-4b7c-b26b-ef4b129f380b service nova] [instance: a415d4f2-abc7-4553-8442-312316e686b2] Received event network-vif-deleted-f2c021c6-dbd4-40da-80c8-19678be6d78c {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 906.126429] env[63345]: INFO nova.compute.manager [req-593c805d-1c60-4773-88ab-2f34456482a9 req-14238258-044f-4b7c-b26b-ef4b129f380b service nova] [instance: a415d4f2-abc7-4553-8442-312316e686b2] Neutron deleted interface f2c021c6-dbd4-40da-80c8-19678be6d78c; detaching it from the instance and deleting it from the info cache [ 906.126429] env[63345]: DEBUG nova.network.neutron [req-593c805d-1c60-4773-88ab-2f34456482a9 req-14238258-044f-4b7c-b26b-ef4b129f380b service nova] [instance: a415d4f2-abc7-4553-8442-312316e686b2] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 906.168099] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e368a42-5bb6-434a-999b-36046b898238 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.173523] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b10f5478-4b37-47ee-91ea-5d5fd0f186ad tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Acquiring lock "5e20b33c-1481-4bd3-b269-29a70cc3150d" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 906.173523] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b10f5478-4b37-47ee-91ea-5d5fd0f186ad tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Lock "5e20b33c-1481-4bd3-b269-29a70cc3150d" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 906.173847] env[63345]: INFO nova.compute.manager [None req-b10f5478-4b37-47ee-91ea-5d5fd0f186ad tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 5e20b33c-1481-4bd3-b269-29a70cc3150d] Attaching volume 5e6c8d6d-97f5-444a-b63d-e2544785247a to /dev/sdb [ 906.179031] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7277ee4-63c9-4fcf-a569-296423a87804 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.234787] env[63345]: DEBUG nova.network.neutron [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Successfully created port: 277d5619-4a4c-4f02-9ce7-786f57c7dc46 {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 906.237417] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57287023-4f8d-46f1-9e33-96ca2c47a30b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} 
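Nearly every record in this section brackets a unit of work with the lockutils pattern: Acquiring lock "X" by "Y", then "acquired ... waited N s", then "released ... held N s". A small standard-library sketch of that acquire-wait/hold-release accounting is below; the actual code paths use oslo_concurrency.lockutils, so this only mirrors the logged pattern and is not Nova's implementation.

# Sketch of the acquire/waited/held accounting seen in the
# oslo_concurrency.lockutils records above, rebuilt with the standard
# library purely for illustration.
import threading
import time
from contextlib import contextmanager

_locks: dict[str, threading.Lock] = {}
_registry_guard = threading.Lock()


@contextmanager
def timed_lock(name: str, holder: str):
    # One named lock per resource, created lazily.
    with _registry_guard:
        lock = _locks.setdefault(name, threading.Lock())
    print(f'Acquiring lock "{name}" by "{holder}"')
    t0 = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - t0
    print(f'Lock "{name}" acquired by "{holder}" :: waited {waited:.3f}s')
    t1 = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        held = time.monotonic() - t1
        print(f'Lock "{name}" "released" by "{holder}" :: held {held:.3f}s')


if __name__ == "__main__":
    with timed_lock("compute_resources", "update_usage-demo"):
        time.sleep(0.05)  # pretend to update the resource tracker

The long "waited 20.067s" and "waited 20.906s" values above come from exactly this serialization: multiple request contexts contend for the single "compute_resources" lock, so each holder's work time becomes the next waiter's wait time.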
[ 906.240666] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09e58d91-ce1f-4c8d-b7b1-2db0c7d36881 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.250681] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3660ec23-81bc-47c8-b167-74723f0b18e3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.254322] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d91c8cf5-6e0d-4930-b3c3-519024d3d4d1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.270945] env[63345]: DEBUG nova.compute.provider_tree [None req-f03dc436-4cdc-4f95-af05-907d03ccf506 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 906.275833] env[63345]: DEBUG nova.virt.block_device [None req-b10f5478-4b37-47ee-91ea-5d5fd0f186ad tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 5e20b33c-1481-4bd3-b269-29a70cc3150d] Updating existing volume attachment record: 2bb3dbde-a0e6-4a57-9f95-2ac975a6b3a3 {{(pid=63345) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 906.288667] env[63345]: DEBUG oslo_concurrency.lockutils [None req-13339675-0eda-4aa7-bd3b-fc12fcd246f3 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Lock "27e2cb12-d251-434a-b79e-6fbda80d3637" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 40.193s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 906.297862] env[63345]: DEBUG nova.compute.manager [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Start building block device mappings for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 906.329380] env[63345]: DEBUG oslo_vmware.api [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Task: {'id': task-1017411, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077629} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.329943] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] [instance: 017a06b3-cc1a-4822-a07f-ca881fd4254b] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 906.330857] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92ba4735-744a-48e8-9313-7dff7a0fc4f7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.357247] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] [instance: 017a06b3-cc1a-4822-a07f-ca881fd4254b] Reconfiguring VM instance instance-00000054 to attach disk [datastore2] 017a06b3-cc1a-4822-a07f-ca881fd4254b/017a06b3-cc1a-4822-a07f-ca881fd4254b.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 906.358080] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-440ccb67-371b-4a0c-a349-da0e69017e94 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.384832] env[63345]: DEBUG oslo_vmware.api [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017412, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.386995] env[63345]: DEBUG oslo_vmware.api [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Waiting for the task: (returnval){ [ 906.386995] env[63345]: value = "task-1017414" [ 906.386995] env[63345]: _type = "Task" [ 906.386995] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.398317] env[63345]: DEBUG oslo_vmware.api [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Task: {'id': task-1017414, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.467271] env[63345]: DEBUG nova.network.neutron [-] [instance: a415d4f2-abc7-4553-8442-312316e686b2] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 906.490926] env[63345]: DEBUG oslo_vmware.api [None req-6284547f-525f-42d1-ac38-525740462381 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1017413, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.632031] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-27474b64-6850-4d88-88aa-09009822bc35 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.641820] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae8189ec-813b-4237-b100-ce273220a358 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.678109] env[63345]: DEBUG nova.compute.manager [req-593c805d-1c60-4773-88ab-2f34456482a9 req-14238258-044f-4b7c-b26b-ef4b129f380b service nova] [instance: a415d4f2-abc7-4553-8442-312316e686b2] Detach interface failed, port_id=f2c021c6-dbd4-40da-80c8-19678be6d78c, reason: Instance a415d4f2-abc7-4553-8442-312316e686b2 could not be found. {{(pid=63345) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 906.779646] env[63345]: DEBUG nova.scheduler.client.report [None req-f03dc436-4cdc-4f95-af05-907d03ccf506 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 906.871237] env[63345]: DEBUG oslo_vmware.api [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017412, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.538727} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.871618] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] c84c8b9a-9164-4dd7-b094-dd09c15c6f21/c84c8b9a-9164-4dd7-b094-dd09c15c6f21.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 906.871974] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: c84c8b9a-9164-4dd7-b094-dd09c15c6f21] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 906.872371] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b1c6b35e-1e2e-4e12-90ba-9a052e4c8a55 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.880793] env[63345]: DEBUG oslo_vmware.api [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Waiting for the task: (returnval){ [ 906.880793] env[63345]: value = "task-1017418" [ 906.880793] env[63345]: _type = "Task" [ 906.880793] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.896668] env[63345]: DEBUG oslo_vmware.api [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017418, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.908779] env[63345]: DEBUG oslo_vmware.api [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Task: {'id': task-1017414, 'name': ReconfigVM_Task, 'duration_secs': 0.378217} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.909502] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] [instance: 017a06b3-cc1a-4822-a07f-ca881fd4254b] Reconfigured VM instance instance-00000054 to attach disk [datastore2] 017a06b3-cc1a-4822-a07f-ca881fd4254b/017a06b3-cc1a-4822-a07f-ca881fd4254b.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 906.910187] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3f3da4b8-c7ae-44cc-9e65-38cb60341d58 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.917342] env[63345]: DEBUG oslo_vmware.api [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Waiting for the task: (returnval){ [ 906.917342] env[63345]: value = "task-1017419" [ 906.917342] env[63345]: _type = "Task" [ 906.917342] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.926625] env[63345]: DEBUG oslo_vmware.api [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Task: {'id': task-1017419, 'name': Rename_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.975178] env[63345]: INFO nova.compute.manager [-] [instance: a415d4f2-abc7-4553-8442-312316e686b2] Took 1.46 seconds to deallocate network for instance. [ 906.988931] env[63345]: DEBUG oslo_vmware.api [None req-6284547f-525f-42d1-ac38-525740462381 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1017413, 'name': PowerOffVM_Task, 'duration_secs': 0.523137} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.995607] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-6284547f-525f-42d1-ac38-525740462381 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 906.995607] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-6284547f-525f-42d1-ac38-525740462381 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 906.996600] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-28a6211b-c9c6-4aed-96f4-e7cf04ef9c69 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.075484] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-6284547f-525f-42d1-ac38-525740462381 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 907.075794] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-6284547f-525f-42d1-ac38-525740462381 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 907.076158] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-6284547f-525f-42d1-ac38-525740462381 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Deleting the datastore file [datastore2] 0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01 {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 907.076353] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3b68cc05-b7a8-41c1-a5a3-331baa639bf7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.084286] env[63345]: DEBUG oslo_vmware.api [None req-6284547f-525f-42d1-ac38-525740462381 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Waiting for the task: (returnval){ [ 907.084286] env[63345]: value = "task-1017421" [ 907.084286] env[63345]: _type = "Task" [ 907.084286] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.093502] env[63345]: DEBUG oslo_vmware.api [None req-6284547f-525f-42d1-ac38-525740462381 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1017421, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.207032] env[63345]: DEBUG oslo_concurrency.lockutils [None req-44885277-f439-46df-b61e-45e89b2a79d4 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Acquiring lock "27e2cb12-d251-434a-b79e-6fbda80d3637" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 907.209021] env[63345]: DEBUG oslo_concurrency.lockutils [None req-44885277-f439-46df-b61e-45e89b2a79d4 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Lock "27e2cb12-d251-434a-b79e-6fbda80d3637" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.002s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 907.209343] env[63345]: DEBUG oslo_concurrency.lockutils [None req-44885277-f439-46df-b61e-45e89b2a79d4 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Acquiring lock "27e2cb12-d251-434a-b79e-6fbda80d3637-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 907.209607] env[63345]: DEBUG oslo_concurrency.lockutils [None req-44885277-f439-46df-b61e-45e89b2a79d4 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Lock "27e2cb12-d251-434a-b79e-6fbda80d3637-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 907.209809] env[63345]: DEBUG oslo_concurrency.lockutils [None req-44885277-f439-46df-b61e-45e89b2a79d4 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Lock "27e2cb12-d251-434a-b79e-6fbda80d3637-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 907.212243] env[63345]: INFO nova.compute.manager [None req-44885277-f439-46df-b61e-45e89b2a79d4 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 27e2cb12-d251-434a-b79e-6fbda80d3637] Terminating instance [ 907.286370] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f03dc436-4cdc-4f95-af05-907d03ccf506 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.997s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 907.288664] env[63345]: DEBUG oslo_concurrency.lockutils [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.906s 
{{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 907.288911] env[63345]: DEBUG nova.objects.instance [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Lazy-loading 'resources' on Instance uuid 9aa651b8-317d-4153-8c33-9df0a5d16115 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 907.308797] env[63345]: DEBUG nova.compute.manager [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Start spawning the instance on the hypervisor. {{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 907.359336] env[63345]: DEBUG nova.virt.hardware [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 907.360021] env[63345]: DEBUG nova.virt.hardware [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 907.360021] env[63345]: DEBUG nova.virt.hardware [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 907.360021] env[63345]: DEBUG nova.virt.hardware [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 907.360282] env[63345]: DEBUG nova.virt.hardware [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 907.360282] env[63345]: DEBUG nova.virt.hardware [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 907.360463] env[63345]: 
DEBUG nova.virt.hardware [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 907.360633] env[63345]: DEBUG nova.virt.hardware [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 907.360815] env[63345]: DEBUG nova.virt.hardware [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 907.360984] env[63345]: DEBUG nova.virt.hardware [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 907.361209] env[63345]: DEBUG nova.virt.hardware [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 907.362178] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-576d53f9-7ccd-4adf-a81c-32ce1799f3d7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.365730] env[63345]: INFO nova.scheduler.client.report [None req-f03dc436-4cdc-4f95-af05-907d03ccf506 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Deleted allocations for instance 14198777-9091-4c69-8928-c83135acc7d2 [ 907.373355] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dea39873-b616-4231-95e6-92485dd128b5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.396795] env[63345]: DEBUG oslo_vmware.api [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017418, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.093179} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.397096] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: c84c8b9a-9164-4dd7-b094-dd09c15c6f21] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 907.397863] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92b7bf31-b1b8-45ba-b559-ee2faecef804 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.427734] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: c84c8b9a-9164-4dd7-b094-dd09c15c6f21] Reconfiguring VM instance instance-00000055 to attach disk [datastore2] c84c8b9a-9164-4dd7-b094-dd09c15c6f21/c84c8b9a-9164-4dd7-b094-dd09c15c6f21.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 907.428138] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-590fc4ad-e492-4432-b174-3e1b01526737 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.458298] env[63345]: DEBUG oslo_vmware.api [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Task: {'id': task-1017419, 'name': Rename_Task, 'duration_secs': 0.421836} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.458298] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] [instance: 017a06b3-cc1a-4822-a07f-ca881fd4254b] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 907.458298] env[63345]: DEBUG oslo_vmware.api [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Waiting for the task: (returnval){ [ 907.458298] env[63345]: value = "task-1017422" [ 907.458298] env[63345]: _type = "Task" [ 907.458298] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.458298] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2aacd531-a84a-4365-9594-5fc127063f9e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.463660] env[63345]: DEBUG oslo_vmware.api [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Waiting for the task: (returnval){ [ 907.463660] env[63345]: value = "task-1017423" [ 907.463660] env[63345]: _type = "Task" [ 907.463660] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.466984] env[63345]: DEBUG oslo_vmware.api [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017422, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.477958] env[63345]: DEBUG oslo_vmware.api [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Task: {'id': task-1017423, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.486818] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7f9b0d2b-afa1-4fff-b083-a7a9a4604fd0 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 907.596322] env[63345]: DEBUG oslo_vmware.api [None req-6284547f-525f-42d1-ac38-525740462381 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Task: {'id': task-1017421, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.408123} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.596666] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-6284547f-525f-42d1-ac38-525740462381 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 907.596948] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-6284547f-525f-42d1-ac38-525740462381 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 907.597233] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-6284547f-525f-42d1-ac38-525740462381 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 907.597486] env[63345]: INFO nova.compute.manager [None req-6284547f-525f-42d1-ac38-525740462381 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] [instance: 0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01] Took 1.65 seconds to destroy the instance on the hypervisor. [ 907.597818] env[63345]: DEBUG oslo.service.loopingcall [None req-6284547f-525f-42d1-ac38-525740462381 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 907.598098] env[63345]: DEBUG nova.compute.manager [-] [instance: 0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 907.598260] env[63345]: DEBUG nova.network.neutron [-] [instance: 0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 907.715901] env[63345]: DEBUG nova.compute.manager [None req-44885277-f439-46df-b61e-45e89b2a79d4 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 27e2cb12-d251-434a-b79e-6fbda80d3637] Start destroying the instance on the hypervisor. {{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 907.716227] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-44885277-f439-46df-b61e-45e89b2a79d4 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 27e2cb12-d251-434a-b79e-6fbda80d3637] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 907.717185] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d177281-d53d-4f7f-a75c-db2829b278cd {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.726842] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-44885277-f439-46df-b61e-45e89b2a79d4 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 27e2cb12-d251-434a-b79e-6fbda80d3637] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 907.726842] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0a91c0c2-4311-4667-b574-8d475673e826 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.736898] env[63345]: DEBUG oslo_vmware.api [None req-44885277-f439-46df-b61e-45e89b2a79d4 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Waiting for the task: (returnval){ [ 907.736898] env[63345]: value = "task-1017424" [ 907.736898] env[63345]: _type = "Task" [ 907.736898] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.747049] env[63345]: DEBUG oslo_vmware.api [None req-44885277-f439-46df-b61e-45e89b2a79d4 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': task-1017424, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.792077] env[63345]: DEBUG nova.objects.instance [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Lazy-loading 'numa_topology' on Instance uuid 9aa651b8-317d-4153-8c33-9df0a5d16115 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 907.876718] env[63345]: DEBUG nova.compute.manager [req-03524ee7-7f8e-4e4e-9b06-4a2702df9c62 req-dd6ed3c5-c06a-4815-bbec-bdf2aa2f4184 service nova] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Received event network-vif-plugged-277d5619-4a4c-4f02-9ce7-786f57c7dc46 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 907.877042] env[63345]: DEBUG oslo_concurrency.lockutils [req-03524ee7-7f8e-4e4e-9b06-4a2702df9c62 req-dd6ed3c5-c06a-4815-bbec-bdf2aa2f4184 service nova] Acquiring lock "4868a0a0-ca35-44b0-a90c-124aa366af76-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 907.877349] env[63345]: DEBUG oslo_concurrency.lockutils [req-03524ee7-7f8e-4e4e-9b06-4a2702df9c62 req-dd6ed3c5-c06a-4815-bbec-bdf2aa2f4184 service nova] Lock "4868a0a0-ca35-44b0-a90c-124aa366af76-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 907.877483] env[63345]: DEBUG oslo_concurrency.lockutils [req-03524ee7-7f8e-4e4e-9b06-4a2702df9c62 req-dd6ed3c5-c06a-4815-bbec-bdf2aa2f4184 service nova] Lock "4868a0a0-ca35-44b0-a90c-124aa366af76-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 907.877709] env[63345]: DEBUG nova.compute.manager [req-03524ee7-7f8e-4e4e-9b06-4a2702df9c62 req-dd6ed3c5-c06a-4815-bbec-bdf2aa2f4184 service nova] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] No waiting events found dispatching network-vif-plugged-277d5619-4a4c-4f02-9ce7-786f57c7dc46 {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 907.877934] env[63345]: WARNING nova.compute.manager [req-03524ee7-7f8e-4e4e-9b06-4a2702df9c62 req-dd6ed3c5-c06a-4815-bbec-bdf2aa2f4184 service nova] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Received unexpected event network-vif-plugged-277d5619-4a4c-4f02-9ce7-786f57c7dc46 for instance with vm_state building and task_state spawning. 
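The req-03524ee7/req-dd6ed3c5 records above trace the external-event path: Neutron reports network-vif-plugged for port 277d5619..., the manager looks for a waiter registered for that (instance, event) pair, finds none, and logs the "Received unexpected event ... vm_state building and task_state spawning" warning. Below is a compact sketch of that register/dispatch handshake under assumed names (InstanceEventWaiter and its methods are hypothetical); it mirrors only the behaviour visible in the log, not Nova's actual InstanceEvents class.

# Sketch of the external-event handshake visible above: a spawning thread
# may register interest in an event, and an event arriving from Neutron
# either wakes that waiter or is reported as unexpected.
# Class and method names here are hypothetical, not Nova's.
import threading


class InstanceEventWaiter:
    def __init__(self):
        self._events: dict[tuple[str, str], threading.Event] = {}
        self._lock = threading.Lock()

    def prepare(self, instance_uuid: str, event_name: str) -> threading.Event:
        """Register interest in an event before triggering the external action."""
        ev = threading.Event()
        with self._lock:
            self._events[(instance_uuid, event_name)] = ev
        return ev

    def dispatch(self, instance_uuid: str, event_name: str) -> None:
        """Deliver an incoming event; warn if nobody was waiting for it."""
        with self._lock:
            ev = self._events.pop((instance_uuid, event_name), None)
        if ev is None:
            print(f"WARNING: received unexpected event {event_name} "
                  f"for instance {instance_uuid}")
        else:
            ev.set()


if __name__ == "__main__":
    waiter = InstanceEventWaiter()
    # Event arrives before anyone registered for it -> "unexpected" warning,
    # which is exactly the benign race logged above during spawn.
    waiter.dispatch("instance-uuid", "network-vif-plugged-port-id")
    # Register first, then dispatch -> the waiter is woken instead.
    ev = waiter.prepare("instance-uuid", "network-vif-plugged-port-id")
    waiter.dispatch("instance-uuid", "network-vif-plugged-port-id")
    print("waiter woken:", ev.is_set())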
[ 907.878573] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f03dc436-4cdc-4f95-af05-907d03ccf506 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Lock "14198777-9091-4c69-8928-c83135acc7d2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.096s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 907.957126] env[63345]: DEBUG nova.network.neutron [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Successfully updated port: 277d5619-4a4c-4f02-9ce7-786f57c7dc46 {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 907.969582] env[63345]: DEBUG oslo_vmware.api [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017422, 'name': ReconfigVM_Task, 'duration_secs': 0.427133} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.973565] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: c84c8b9a-9164-4dd7-b094-dd09c15c6f21] Reconfigured VM instance instance-00000055 to attach disk [datastore2] c84c8b9a-9164-4dd7-b094-dd09c15c6f21/c84c8b9a-9164-4dd7-b094-dd09c15c6f21.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 907.974396] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-23560a7c-c362-4457-8a3d-fcc974ed7c4f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.984495] env[63345]: DEBUG oslo_vmware.api [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Task: {'id': task-1017423, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.987075] env[63345]: DEBUG oslo_vmware.api [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Waiting for the task: (returnval){ [ 907.987075] env[63345]: value = "task-1017425" [ 907.987075] env[63345]: _type = "Task" [ 907.987075] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.998232] env[63345]: DEBUG oslo_vmware.api [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017425, 'name': Rename_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.246416] env[63345]: DEBUG oslo_vmware.api [None req-44885277-f439-46df-b61e-45e89b2a79d4 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': task-1017424, 'name': PowerOffVM_Task, 'duration_secs': 0.232144} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.246715] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-44885277-f439-46df-b61e-45e89b2a79d4 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 27e2cb12-d251-434a-b79e-6fbda80d3637] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 908.246893] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-44885277-f439-46df-b61e-45e89b2a79d4 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 27e2cb12-d251-434a-b79e-6fbda80d3637] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 908.247170] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-052ada5b-07bf-4522-903e-6b2ebb5327a1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.294809] env[63345]: DEBUG nova.objects.base [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Object Instance<9aa651b8-317d-4153-8c33-9df0a5d16115> lazy-loaded attributes: resources,numa_topology {{(pid=63345) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 908.318831] env[63345]: DEBUG nova.compute.manager [req-7bb28cb6-865e-4da3-9eea-0ed7417e6533 req-dcc50941-be4b-475a-9d29-34b0b03b54dd service nova] [instance: 0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01] Received event network-vif-deleted-a8449910-e73e-4fd5-a8c3-8833ab272413 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 908.318831] env[63345]: INFO nova.compute.manager [req-7bb28cb6-865e-4da3-9eea-0ed7417e6533 req-dcc50941-be4b-475a-9d29-34b0b03b54dd service nova] [instance: 0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01] Neutron deleted interface a8449910-e73e-4fd5-a8c3-8833ab272413; detaching it from the instance and deleting it from the info cache [ 908.318831] env[63345]: DEBUG nova.network.neutron [req-7bb28cb6-865e-4da3-9eea-0ed7417e6533 req-dcc50941-be4b-475a-9d29-34b0b03b54dd service nova] [instance: 0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 908.330341] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-44885277-f439-46df-b61e-45e89b2a79d4 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 27e2cb12-d251-434a-b79e-6fbda80d3637] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 908.330990] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-44885277-f439-46df-b61e-45e89b2a79d4 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 27e2cb12-d251-434a-b79e-6fbda80d3637] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 908.330990] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-44885277-f439-46df-b61e-45e89b2a79d4 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Deleting the datastore 
file [datastore2] 27e2cb12-d251-434a-b79e-6fbda80d3637 {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 908.331314] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-89239ec1-0777-4799-92f7-95ad179d9f4b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.342342] env[63345]: DEBUG oslo_vmware.api [None req-44885277-f439-46df-b61e-45e89b2a79d4 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Waiting for the task: (returnval){ [ 908.342342] env[63345]: value = "task-1017427" [ 908.342342] env[63345]: _type = "Task" [ 908.342342] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.351377] env[63345]: DEBUG oslo_vmware.api [None req-44885277-f439-46df-b61e-45e89b2a79d4 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': task-1017427, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.463865] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquiring lock "refresh_cache-4868a0a0-ca35-44b0-a90c-124aa366af76" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 908.464009] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquired lock "refresh_cache-4868a0a0-ca35-44b0-a90c-124aa366af76" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 908.464156] env[63345]: DEBUG nova.network.neutron [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 908.482472] env[63345]: DEBUG oslo_vmware.api [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Task: {'id': task-1017423, 'name': PowerOnVM_Task, 'duration_secs': 0.579176} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.482472] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] [instance: 017a06b3-cc1a-4822-a07f-ca881fd4254b] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 908.482472] env[63345]: INFO nova.compute.manager [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] [instance: 017a06b3-cc1a-4822-a07f-ca881fd4254b] Took 9.53 seconds to spawn the instance on the hypervisor. [ 908.482472] env[63345]: DEBUG nova.compute.manager [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] [instance: 017a06b3-cc1a-4822-a07f-ca881fd4254b] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 908.482472] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14a124e5-f3fd-4ba7-8e55-e09fda1f961c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.500787] env[63345]: DEBUG oslo_vmware.api [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017425, 'name': Rename_Task, 'duration_secs': 0.173128} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.501417] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: c84c8b9a-9164-4dd7-b094-dd09c15c6f21] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 908.503917] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c0ebd472-fdff-4cb0-8712-0773c725f765 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.511656] env[63345]: DEBUG oslo_vmware.api [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Waiting for the task: (returnval){ [ 908.511656] env[63345]: value = "task-1017428" [ 908.511656] env[63345]: _type = "Task" [ 908.511656] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.521183] env[63345]: DEBUG oslo_vmware.api [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017428, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.674021] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e73c83f-c490-4596-a8bb-296a127fc16b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.681980] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75326e2b-5200-4fa7-9939-93fd2b1ec515 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.710823] env[63345]: DEBUG nova.network.neutron [-] [instance: 0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 908.712826] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad8c1e5e-f7ce-43d4-afcb-7b8cf86d5b21 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.721149] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f0f3e14-d93c-476e-992a-f63ee117586f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.736517] env[63345]: DEBUG nova.compute.provider_tree [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 908.822177] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5b34b0e1-9684-49da-a14d-7a50bfb8873c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.832083] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bde1459-1508-4c6a-908b-4526a881e09a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.852847] env[63345]: DEBUG oslo_vmware.api [None req-44885277-f439-46df-b61e-45e89b2a79d4 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Task: {'id': task-1017427, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.287862} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.852847] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-44885277-f439-46df-b61e-45e89b2a79d4 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 908.852847] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-44885277-f439-46df-b61e-45e89b2a79d4 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 27e2cb12-d251-434a-b79e-6fbda80d3637] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 908.852847] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-44885277-f439-46df-b61e-45e89b2a79d4 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 27e2cb12-d251-434a-b79e-6fbda80d3637] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 908.852847] env[63345]: INFO nova.compute.manager [None req-44885277-f439-46df-b61e-45e89b2a79d4 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] [instance: 27e2cb12-d251-434a-b79e-6fbda80d3637] Took 1.14 seconds to destroy the instance on the hypervisor. [ 908.853155] env[63345]: DEBUG oslo.service.loopingcall [None req-44885277-f439-46df-b61e-45e89b2a79d4 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 908.853194] env[63345]: DEBUG nova.compute.manager [-] [instance: 27e2cb12-d251-434a-b79e-6fbda80d3637] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 908.853328] env[63345]: DEBUG nova.network.neutron [-] [instance: 27e2cb12-d251-434a-b79e-6fbda80d3637] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 908.863179] env[63345]: DEBUG nova.compute.manager [req-7bb28cb6-865e-4da3-9eea-0ed7417e6533 req-dcc50941-be4b-475a-9d29-34b0b03b54dd service nova] [instance: 0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01] Detach interface failed, port_id=a8449910-e73e-4fd5-a8c3-8833ab272413, reason: Instance 0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01 could not be found. {{(pid=63345) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 908.999323] env[63345]: DEBUG nova.network.neutron [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 909.008594] env[63345]: INFO nova.compute.manager [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] [instance: 017a06b3-cc1a-4822-a07f-ca881fd4254b] Took 41.04 seconds to build instance. 
[ 909.022499] env[63345]: DEBUG oslo_vmware.api [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017428, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.172731] env[63345]: DEBUG nova.network.neutron [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Updating instance_info_cache with network_info: [{"id": "277d5619-4a4c-4f02-9ce7-786f57c7dc46", "address": "fa:16:3e:78:ff:28", "network": {"id": "18b67684-3f06-4f15-be40-ba0b2769b248", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1680877425-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cb91ecf5d00e48dea9baf2122ac4fed7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "68add7d6-c025-46fa-84d3-9c589adb63e4", "external-id": "nsx-vlan-transportzone-961", "segmentation_id": 961, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap277d5619-4a", "ovs_interfaceid": "277d5619-4a4c-4f02-9ce7-786f57c7dc46", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 909.216140] env[63345]: INFO nova.compute.manager [-] [instance: 0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01] Took 1.62 seconds to deallocate network for instance. 
[ 909.240629] env[63345]: DEBUG nova.scheduler.client.report [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 909.512466] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ac266c25-8bf0-4987-be04-669beca1781a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Lock "017a06b3-cc1a-4822-a07f-ca881fd4254b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 42.556s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 909.525165] env[63345]: DEBUG oslo_vmware.api [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017428, 'name': PowerOnVM_Task} progress is 94%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.675384] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Releasing lock "refresh_cache-4868a0a0-ca35-44b0-a90c-124aa366af76" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 909.675719] env[63345]: DEBUG nova.compute.manager [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Instance network_info: |[{"id": "277d5619-4a4c-4f02-9ce7-786f57c7dc46", "address": "fa:16:3e:78:ff:28", "network": {"id": "18b67684-3f06-4f15-be40-ba0b2769b248", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1680877425-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cb91ecf5d00e48dea9baf2122ac4fed7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "68add7d6-c025-46fa-84d3-9c589adb63e4", "external-id": "nsx-vlan-transportzone-961", "segmentation_id": 961, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap277d5619-4a", "ovs_interfaceid": "277d5619-4a4c-4f02-9ce7-786f57c7dc46", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 
909.676269] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:78:ff:28', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '68add7d6-c025-46fa-84d3-9c589adb63e4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '277d5619-4a4c-4f02-9ce7-786f57c7dc46', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 909.684782] env[63345]: DEBUG oslo.service.loopingcall [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 909.685123] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 909.685420] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-889ee42a-8d39-426c-8dbf-158d59fcb35f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.707884] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 909.707884] env[63345]: value = "task-1017431" [ 909.707884] env[63345]: _type = "Task" [ 909.707884] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.718232] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017431, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.725415] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6284547f-525f-42d1-ac38-525740462381 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 909.746585] env[63345]: DEBUG oslo_concurrency.lockutils [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.458s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 909.749179] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 20.036s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 909.749389] env[63345]: DEBUG nova.objects.instance [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63345) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 909.882610] env[63345]: DEBUG nova.network.neutron [-] [instance: 27e2cb12-d251-434a-b79e-6fbda80d3637] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 910.023662] env[63345]: DEBUG oslo_vmware.api [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017428, 'name': PowerOnVM_Task, 'duration_secs': 1.070039} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.025324] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: c84c8b9a-9164-4dd7-b094-dd09c15c6f21] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 910.025543] env[63345]: INFO nova.compute.manager [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: c84c8b9a-9164-4dd7-b094-dd09c15c6f21] Took 8.63 seconds to spawn the instance on the hypervisor. 
[ 910.025729] env[63345]: DEBUG nova.compute.manager [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: c84c8b9a-9164-4dd7-b094-dd09c15c6f21] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 910.026873] env[63345]: DEBUG nova.compute.manager [req-ce8205ea-e484-4c7e-8f97-aef97c1e4839 req-3ee074ae-8f70-495e-8d45-b602e538289e service nova] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Received event network-changed-277d5619-4a4c-4f02-9ce7-786f57c7dc46 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 910.027073] env[63345]: DEBUG nova.compute.manager [req-ce8205ea-e484-4c7e-8f97-aef97c1e4839 req-3ee074ae-8f70-495e-8d45-b602e538289e service nova] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Refreshing instance network info cache due to event network-changed-277d5619-4a4c-4f02-9ce7-786f57c7dc46. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 910.027296] env[63345]: DEBUG oslo_concurrency.lockutils [req-ce8205ea-e484-4c7e-8f97-aef97c1e4839 req-3ee074ae-8f70-495e-8d45-b602e538289e service nova] Acquiring lock "refresh_cache-4868a0a0-ca35-44b0-a90c-124aa366af76" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 910.027447] env[63345]: DEBUG oslo_concurrency.lockutils [req-ce8205ea-e484-4c7e-8f97-aef97c1e4839 req-3ee074ae-8f70-495e-8d45-b602e538289e service nova] Acquired lock "refresh_cache-4868a0a0-ca35-44b0-a90c-124aa366af76" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 910.027611] env[63345]: DEBUG nova.network.neutron [req-ce8205ea-e484-4c7e-8f97-aef97c1e4839 req-3ee074ae-8f70-495e-8d45-b602e538289e service nova] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Refreshing network info cache for port 277d5619-4a4c-4f02-9ce7-786f57c7dc46 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 910.029367] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a03925a-0aa7-4ca5-8664-120f09b1581a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.220311] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017431, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.260778] env[63345]: DEBUG oslo_concurrency.lockutils [None req-65ca7b5f-dce5-485f-9abd-3a293411dcd5 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Lock "9aa651b8-317d-4153-8c33-9df0a5d16115" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 47.347s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 910.262213] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Lock "9aa651b8-317d-4153-8c33-9df0a5d16115" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 21.794s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 910.262717] env[63345]: INFO nova.compute.manager [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Unshelving [ 910.372909] env[63345]: DEBUG nova.compute.manager [req-3ab41906-8fe2-4419-a857-82d4fad94b4b req-e541650d-2128-4756-bc0e-7598e3868632 service nova] [instance: 27e2cb12-d251-434a-b79e-6fbda80d3637] Received event network-vif-deleted-a624b77d-1b1f-4acb-8c80-d8c6fb0360b9 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 910.384837] env[63345]: INFO nova.compute.manager [-] [instance: 27e2cb12-d251-434a-b79e-6fbda80d3637] Took 1.53 seconds to deallocate network for instance. [ 910.548411] env[63345]: INFO nova.compute.manager [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: c84c8b9a-9164-4dd7-b094-dd09c15c6f21] Took 36.78 seconds to build instance. [ 910.720598] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017431, 'name': CreateVM_Task, 'duration_secs': 0.727402} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.720806] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 910.721591] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 910.721813] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 910.722216] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 910.722513] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3379b301-d17b-4ad1-8b17-be6e32dfe17e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.728189] env[63345]: DEBUG oslo_vmware.api [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 910.728189] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52146cee-12b1-35ba-0638-e36184e169dc" [ 910.728189] env[63345]: _type = "Task" [ 910.728189] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.739129] env[63345]: DEBUG oslo_vmware.api [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52146cee-12b1-35ba-0638-e36184e169dc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.762714] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b74fe5a9-3eaa-4b7e-a10d-275a3b5b001c tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.013s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 910.763863] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.155s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 910.765304] env[63345]: INFO nova.compute.claims [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: e5546a26-3f94-48a6-914a-2c37e63a0aeb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 910.892264] env[63345]: DEBUG oslo_concurrency.lockutils [None req-44885277-f439-46df-b61e-45e89b2a79d4 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 910.979686] env[63345]: DEBUG nova.network.neutron [req-ce8205ea-e484-4c7e-8f97-aef97c1e4839 req-3ee074ae-8f70-495e-8d45-b602e538289e service nova] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Updated VIF entry in instance network info cache for port 277d5619-4a4c-4f02-9ce7-786f57c7dc46. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 910.979748] env[63345]: DEBUG nova.network.neutron [req-ce8205ea-e484-4c7e-8f97-aef97c1e4839 req-3ee074ae-8f70-495e-8d45-b602e538289e service nova] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Updating instance_info_cache with network_info: [{"id": "277d5619-4a4c-4f02-9ce7-786f57c7dc46", "address": "fa:16:3e:78:ff:28", "network": {"id": "18b67684-3f06-4f15-be40-ba0b2769b248", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1680877425-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cb91ecf5d00e48dea9baf2122ac4fed7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "68add7d6-c025-46fa-84d3-9c589adb63e4", "external-id": "nsx-vlan-transportzone-961", "segmentation_id": 961, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap277d5619-4a", "ovs_interfaceid": "277d5619-4a4c-4f02-9ce7-786f57c7dc46", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 911.054108] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b1e78382-79d0-4667-8510-af1b31e58a57 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Lock "c84c8b9a-9164-4dd7-b094-dd09c15c6f21" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.296s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 911.241394] env[63345]: DEBUG oslo_vmware.api [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52146cee-12b1-35ba-0638-e36184e169dc, 'name': SearchDatastore_Task, 'duration_secs': 0.010701} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.241742] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 911.241982] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 911.242238] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 911.242389] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 911.242611] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 911.242890] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1d51b21c-c63d-477c-85a0-6ac5cafd3803 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.254533] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 911.254721] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 911.255446] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-823df2f0-4eda-45ab-9015-bcbf53858c67 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.260897] env[63345]: DEBUG oslo_vmware.api [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 911.260897] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52af372e-4e01-9f83-bde9-e788a65ebf36" [ 911.260897] env[63345]: _type = "Task" [ 911.260897] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.272998] env[63345]: DEBUG oslo_vmware.api [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52af372e-4e01-9f83-bde9-e788a65ebf36, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.275462] env[63345]: DEBUG nova.compute.utils [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 911.356452] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-b10f5478-4b37-47ee-91ea-5d5fd0f186ad tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 5e20b33c-1481-4bd3-b269-29a70cc3150d] Volume attach. 
Driver type: vmdk {{(pid=63345) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 911.357603] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-b10f5478-4b37-47ee-91ea-5d5fd0f186ad tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 5e20b33c-1481-4bd3-b269-29a70cc3150d] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-226098', 'volume_id': '5e6c8d6d-97f5-444a-b63d-e2544785247a', 'name': 'volume-5e6c8d6d-97f5-444a-b63d-e2544785247a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '5e20b33c-1481-4bd3-b269-29a70cc3150d', 'attached_at': '', 'detached_at': '', 'volume_id': '5e6c8d6d-97f5-444a-b63d-e2544785247a', 'serial': '5e6c8d6d-97f5-444a-b63d-e2544785247a'} {{(pid=63345) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 911.358420] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46a1589e-a665-4501-9e23-43600f8bc9ea {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.376629] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8639e2a1-aeb4-4fb5-a11d-6e5d410ca539 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.408399] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-b10f5478-4b37-47ee-91ea-5d5fd0f186ad tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 5e20b33c-1481-4bd3-b269-29a70cc3150d] Reconfiguring VM instance instance-0000004d to attach disk [datastore2] volume-5e6c8d6d-97f5-444a-b63d-e2544785247a/volume-5e6c8d6d-97f5-444a-b63d-e2544785247a.vmdk or device None with type thin {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 911.409520] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-69d5b284-691e-4af3-adf4-f484145df693 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.429851] env[63345]: DEBUG oslo_vmware.api [None req-b10f5478-4b37-47ee-91ea-5d5fd0f186ad tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Waiting for the task: (returnval){ [ 911.429851] env[63345]: value = "task-1017432" [ 911.429851] env[63345]: _type = "Task" [ 911.429851] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.438908] env[63345]: DEBUG oslo_vmware.api [None req-b10f5478-4b37-47ee-91ea-5d5fd0f186ad tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017432, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.483345] env[63345]: DEBUG oslo_concurrency.lockutils [req-ce8205ea-e484-4c7e-8f97-aef97c1e4839 req-3ee074ae-8f70-495e-8d45-b602e538289e service nova] Releasing lock "refresh_cache-4868a0a0-ca35-44b0-a90c-124aa366af76" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 911.778391] env[63345]: DEBUG oslo_vmware.api [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52af372e-4e01-9f83-bde9-e788a65ebf36, 'name': SearchDatastore_Task, 'duration_secs': 0.027545} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.779634] env[63345]: INFO nova.virt.block_device [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Booting with volume 78ab0d16-eec6-45ab-badd-c9109f513975 at /dev/sdb [ 911.782233] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-be15b664-74f1-4e31-8ad2-e2fea2c8f7d4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.794896] env[63345]: DEBUG oslo_concurrency.lockutils [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquiring lock "49cf9c08-4024-40aa-9370-7b4f8d89e2cf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 911.794896] env[63345]: DEBUG oslo_concurrency.lockutils [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Lock "49cf9c08-4024-40aa-9370-7b4f8d89e2cf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 911.797949] env[63345]: DEBUG oslo_vmware.api [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 911.797949] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]526b260e-e02f-199a-b120-1788fba89327" [ 911.797949] env[63345]: _type = "Task" [ 911.797949] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.808571] env[63345]: DEBUG oslo_vmware.api [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]526b260e-e02f-199a-b120-1788fba89327, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.849355] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7ae58e96-12f4-40a8-b1d2-4a73a1a8eed2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.861484] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eaffdd0-3594-4610-8e98-3b0966b30943 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.899605] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-991fa28a-03a2-4b8d-80cf-46b984d501af {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.912018] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38efdd1c-e89c-48bc-a7f6-158c101d3448 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.939896] env[63345]: DEBUG oslo_vmware.api [None req-b10f5478-4b37-47ee-91ea-5d5fd0f186ad tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017432, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.952674] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b046d2b-3ee9-45f6-8bef-79417d4d6713 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.965444] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-996bd321-bbe2-4422-823d-7df3d2947ce5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.984093] env[63345]: DEBUG nova.virt.block_device [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Updating existing volume attachment record: 0681894b-ec62-4526-9435-2a32413950f8 {{(pid=63345) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 912.184809] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aa1902c-c6f7-47a1-bbf7-6fff53d6ead3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.192931] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4799a844-82ee-4e4b-8742-f7a344bac35b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.226595] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d6c0488-a225-4db3-bced-f301249b513c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.235451] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b009a0e5-ce33-4d19-a8fa-6a30cd6695f8 {{(pid=63345) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.251676] env[63345]: DEBUG nova.compute.provider_tree [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 912.299484] env[63345]: DEBUG nova.compute.manager [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 49cf9c08-4024-40aa-9370-7b4f8d89e2cf] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 912.313649] env[63345]: DEBUG oslo_vmware.api [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]526b260e-e02f-199a-b120-1788fba89327, 'name': SearchDatastore_Task, 'duration_secs': 0.019417} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.314052] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 912.314286] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 4868a0a0-ca35-44b0-a90c-124aa366af76/4868a0a0-ca35-44b0-a90c-124aa366af76.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 912.314563] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ad469ffb-ef5b-4cdf-84b3-7233dfaa8262 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.323431] env[63345]: DEBUG oslo_vmware.api [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 912.323431] env[63345]: value = "task-1017436" [ 912.323431] env[63345]: _type = "Task" [ 912.323431] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.334226] env[63345]: DEBUG oslo_vmware.api [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017436, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.441236] env[63345]: DEBUG oslo_vmware.api [None req-b10f5478-4b37-47ee-91ea-5d5fd0f186ad tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017432, 'name': ReconfigVM_Task, 'duration_secs': 0.676932} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.441588] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-b10f5478-4b37-47ee-91ea-5d5fd0f186ad tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 5e20b33c-1481-4bd3-b269-29a70cc3150d] Reconfigured VM instance instance-0000004d to attach disk [datastore2] volume-5e6c8d6d-97f5-444a-b63d-e2544785247a/volume-5e6c8d6d-97f5-444a-b63d-e2544785247a.vmdk or device None with type thin {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 912.446420] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b5248ddc-6ae1-4456-bf34-c1fcaae57380 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.459128] env[63345]: DEBUG oslo_concurrency.lockutils [None req-419e3c65-74ef-4f96-b1f3-04fd5006a001 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Acquiring lock "bc9d2e6a-f77a-4a21-90bc-81949cbfce91" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 912.459449] env[63345]: DEBUG oslo_concurrency.lockutils [None req-419e3c65-74ef-4f96-b1f3-04fd5006a001 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Lock "bc9d2e6a-f77a-4a21-90bc-81949cbfce91" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 912.459676] env[63345]: DEBUG oslo_concurrency.lockutils [None req-419e3c65-74ef-4f96-b1f3-04fd5006a001 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Acquiring lock "bc9d2e6a-f77a-4a21-90bc-81949cbfce91-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 912.459872] env[63345]: DEBUG oslo_concurrency.lockutils [None req-419e3c65-74ef-4f96-b1f3-04fd5006a001 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Lock "bc9d2e6a-f77a-4a21-90bc-81949cbfce91-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 912.460094] env[63345]: DEBUG oslo_concurrency.lockutils [None req-419e3c65-74ef-4f96-b1f3-04fd5006a001 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Lock "bc9d2e6a-f77a-4a21-90bc-81949cbfce91-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 912.463045] env[63345]: DEBUG oslo_vmware.api [None req-b10f5478-4b37-47ee-91ea-5d5fd0f186ad tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Waiting for the task: (returnval){ [ 912.463045] env[63345]: value = "task-1017437" [ 912.463045] env[63345]: _type = "Task" [ 912.463045] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.463527] env[63345]: INFO nova.compute.manager [None req-419e3c65-74ef-4f96-b1f3-04fd5006a001 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: bc9d2e6a-f77a-4a21-90bc-81949cbfce91] Terminating instance [ 912.476656] env[63345]: DEBUG oslo_vmware.api [None req-b10f5478-4b37-47ee-91ea-5d5fd0f186ad tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017437, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.696126] env[63345]: DEBUG nova.compute.manager [req-60d9cd7e-69e9-4605-9b5c-271f086a5859 req-caa62c71-6957-40a2-aa38-d78d84e8aac7 service nova] [instance: 017a06b3-cc1a-4822-a07f-ca881fd4254b] Received event network-changed-d861c19b-10d8-47c4-90d2-a823d9faa164 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 912.696545] env[63345]: DEBUG nova.compute.manager [req-60d9cd7e-69e9-4605-9b5c-271f086a5859 req-caa62c71-6957-40a2-aa38-d78d84e8aac7 service nova] [instance: 017a06b3-cc1a-4822-a07f-ca881fd4254b] Refreshing instance network info cache due to event network-changed-d861c19b-10d8-47c4-90d2-a823d9faa164. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 912.696809] env[63345]: DEBUG oslo_concurrency.lockutils [req-60d9cd7e-69e9-4605-9b5c-271f086a5859 req-caa62c71-6957-40a2-aa38-d78d84e8aac7 service nova] Acquiring lock "refresh_cache-017a06b3-cc1a-4822-a07f-ca881fd4254b" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 912.697059] env[63345]: DEBUG oslo_concurrency.lockutils [req-60d9cd7e-69e9-4605-9b5c-271f086a5859 req-caa62c71-6957-40a2-aa38-d78d84e8aac7 service nova] Acquired lock "refresh_cache-017a06b3-cc1a-4822-a07f-ca881fd4254b" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 912.697356] env[63345]: DEBUG nova.network.neutron [req-60d9cd7e-69e9-4605-9b5c-271f086a5859 req-caa62c71-6957-40a2-aa38-d78d84e8aac7 service nova] [instance: 017a06b3-cc1a-4822-a07f-ca881fd4254b] Refreshing network info cache for port d861c19b-10d8-47c4-90d2-a823d9faa164 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 912.756042] env[63345]: DEBUG nova.scheduler.client.report [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 912.836750] env[63345]: DEBUG oslo_vmware.api [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017436, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.854040] env[63345]: DEBUG oslo_concurrency.lockutils [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 912.970323] env[63345]: DEBUG nova.compute.manager [None req-419e3c65-74ef-4f96-b1f3-04fd5006a001 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: bc9d2e6a-f77a-4a21-90bc-81949cbfce91] Start destroying the instance on the hypervisor. 
{{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 912.970553] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-419e3c65-74ef-4f96-b1f3-04fd5006a001 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: bc9d2e6a-f77a-4a21-90bc-81949cbfce91] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 912.971341] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cdcdc2c-f6c5-4848-8061-5a07a6fae8a6 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.976938] env[63345]: DEBUG oslo_vmware.api [None req-b10f5478-4b37-47ee-91ea-5d5fd0f186ad tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017437, 'name': ReconfigVM_Task, 'duration_secs': 0.227977} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.977558] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-b10f5478-4b37-47ee-91ea-5d5fd0f186ad tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 5e20b33c-1481-4bd3-b269-29a70cc3150d] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-226098', 'volume_id': '5e6c8d6d-97f5-444a-b63d-e2544785247a', 'name': 'volume-5e6c8d6d-97f5-444a-b63d-e2544785247a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '5e20b33c-1481-4bd3-b269-29a70cc3150d', 'attached_at': '', 'detached_at': '', 'volume_id': '5e6c8d6d-97f5-444a-b63d-e2544785247a', 'serial': '5e6c8d6d-97f5-444a-b63d-e2544785247a'} {{(pid=63345) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 912.985222] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-419e3c65-74ef-4f96-b1f3-04fd5006a001 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: bc9d2e6a-f77a-4a21-90bc-81949cbfce91] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 912.985461] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bd368d20-8147-456f-ab30-e52e5fc09fce {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.992315] env[63345]: DEBUG oslo_vmware.api [None req-419e3c65-74ef-4f96-b1f3-04fd5006a001 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Waiting for the task: (returnval){ [ 912.992315] env[63345]: value = "task-1017438" [ 912.992315] env[63345]: _type = "Task" [ 912.992315] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.003242] env[63345]: DEBUG oslo_vmware.api [None req-419e3c65-74ef-4f96-b1f3-04fd5006a001 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Task: {'id': task-1017438, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.261318] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.497s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 913.262114] env[63345]: DEBUG nova.compute.manager [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: e5546a26-3f94-48a6-914a-2c37e63a0aeb] Start building networks asynchronously for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 913.264492] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 19.362s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 913.268051] env[63345]: DEBUG nova.objects.instance [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] [instance: ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63345) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 913.340044] env[63345]: DEBUG oslo_vmware.api [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017436, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.617477} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.340716] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 4868a0a0-ca35-44b0-a90c-124aa366af76/4868a0a0-ca35-44b0-a90c-124aa366af76.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 913.340716] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 913.340901] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fbe8370b-d37a-4379-8fee-6e067e1d6dcc {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.350454] env[63345]: DEBUG oslo_vmware.api [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 913.350454] env[63345]: value = "task-1017439" [ 913.350454] env[63345]: _type = "Task" [ 913.350454] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.359380] env[63345]: DEBUG oslo_vmware.api [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017439, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.505808] env[63345]: DEBUG oslo_vmware.api [None req-419e3c65-74ef-4f96-b1f3-04fd5006a001 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Task: {'id': task-1017438, 'name': PowerOffVM_Task, 'duration_secs': 0.466815} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.511834] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-419e3c65-74ef-4f96-b1f3-04fd5006a001 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: bc9d2e6a-f77a-4a21-90bc-81949cbfce91] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 913.511999] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-419e3c65-74ef-4f96-b1f3-04fd5006a001 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: bc9d2e6a-f77a-4a21-90bc-81949cbfce91] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 913.513889] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e8df3039-1d15-43de-b379-f0b1582840e2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.637238] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-419e3c65-74ef-4f96-b1f3-04fd5006a001 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: bc9d2e6a-f77a-4a21-90bc-81949cbfce91] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 913.637482] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-419e3c65-74ef-4f96-b1f3-04fd5006a001 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: bc9d2e6a-f77a-4a21-90bc-81949cbfce91] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 913.637716] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-419e3c65-74ef-4f96-b1f3-04fd5006a001 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Deleting the datastore file [datastore2] bc9d2e6a-f77a-4a21-90bc-81949cbfce91 {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 913.638010] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bffd06b7-a7df-4bb5-8e01-d13153a4f8cd {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.646042] env[63345]: DEBUG oslo_vmware.api [None req-419e3c65-74ef-4f96-b1f3-04fd5006a001 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Waiting for the task: (returnval){ [ 913.646042] env[63345]: value = "task-1017441" [ 913.646042] env[63345]: _type = "Task" [ 913.646042] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.655531] env[63345]: DEBUG oslo_vmware.api [None req-419e3c65-74ef-4f96-b1f3-04fd5006a001 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Task: {'id': task-1017441, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.690630] env[63345]: DEBUG nova.network.neutron [req-60d9cd7e-69e9-4605-9b5c-271f086a5859 req-caa62c71-6957-40a2-aa38-d78d84e8aac7 service nova] [instance: 017a06b3-cc1a-4822-a07f-ca881fd4254b] Updated VIF entry in instance network info cache for port d861c19b-10d8-47c4-90d2-a823d9faa164. {{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 913.691053] env[63345]: DEBUG nova.network.neutron [req-60d9cd7e-69e9-4605-9b5c-271f086a5859 req-caa62c71-6957-40a2-aa38-d78d84e8aac7 service nova] [instance: 017a06b3-cc1a-4822-a07f-ca881fd4254b] Updating instance_info_cache with network_info: [{"id": "d861c19b-10d8-47c4-90d2-a823d9faa164", "address": "fa:16:3e:c9:2c:68", "network": {"id": "5a619899-0632-4b47-a853-63998d2913e7", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1507203599-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.253", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e9c7157843047ac8203d4fc5261572a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3739ba33-c119-432c-9aee-80a62864317d", "external-id": "nsx-vlan-transportzone-474", "segmentation_id": 474, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd861c19b-10", "ovs_interfaceid": "d861c19b-10d8-47c4-90d2-a823d9faa164", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 913.768709] env[63345]: DEBUG nova.compute.utils [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 913.774596] env[63345]: DEBUG nova.compute.manager [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: e5546a26-3f94-48a6-914a-2c37e63a0aeb] Allocating IP information in the background. {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 913.774809] env[63345]: DEBUG nova.network.neutron [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: e5546a26-3f94-48a6-914a-2c37e63a0aeb] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 913.859403] env[63345]: DEBUG oslo_vmware.api [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017439, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075129} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.859675] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 913.860491] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ac010cd-df5f-4f22-8b26-c162a054a321 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.883311] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Reconfiguring VM instance instance-00000056 to attach disk [datastore2] 4868a0a0-ca35-44b0-a90c-124aa366af76/4868a0a0-ca35-44b0-a90c-124aa366af76.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 913.884797] env[63345]: DEBUG nova.policy [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fb6730bb6292421e8f943bce2e912bef', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c535ae9067ab4e8a87e95c68af4624fb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 913.887250] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fd70b26b-e11a-48d1-a068-cc5ff93efcd4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.910134] env[63345]: DEBUG oslo_vmware.api [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 913.910134] env[63345]: value = "task-1017442" [ 913.910134] env[63345]: _type = "Task" [ 913.910134] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.917267] env[63345]: DEBUG oslo_vmware.api [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017442, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.042065] env[63345]: DEBUG nova.objects.instance [None req-b10f5478-4b37-47ee-91ea-5d5fd0f186ad tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Lazy-loading 'flavor' on Instance uuid 5e20b33c-1481-4bd3-b269-29a70cc3150d {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 914.158311] env[63345]: DEBUG oslo_vmware.api [None req-419e3c65-74ef-4f96-b1f3-04fd5006a001 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Task: {'id': task-1017441, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.194289] env[63345]: DEBUG oslo_concurrency.lockutils [req-60d9cd7e-69e9-4605-9b5c-271f086a5859 req-caa62c71-6957-40a2-aa38-d78d84e8aac7 service nova] Releasing lock "refresh_cache-017a06b3-cc1a-4822-a07f-ca881fd4254b" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 914.277345] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8213c0cb-f14d-49a5-85ba-aa7507dd65ad tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.012s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 914.278445] env[63345]: DEBUG nova.compute.manager [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: e5546a26-3f94-48a6-914a-2c37e63a0aeb] Start building block device mappings for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 914.280677] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fd2974a3-ed20-44e6-a8e4-d684154888e5 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.326s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 914.282873] env[63345]: DEBUG nova.objects.instance [None req-fd2974a3-ed20-44e6-a8e4-d684154888e5 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Lazy-loading 'resources' on Instance uuid 75fc8365-bf8d-489e-935f-a5169c6a7e62 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 914.418963] env[63345]: DEBUG oslo_vmware.api [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017442, 'name': ReconfigVM_Task, 'duration_secs': 0.455306} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.419384] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Reconfigured VM instance instance-00000056 to attach disk [datastore2] 4868a0a0-ca35-44b0-a90c-124aa366af76/4868a0a0-ca35-44b0-a90c-124aa366af76.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 914.420554] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4dec6ce5-d46a-4608-a101-0bdb9882f3a0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.428349] env[63345]: DEBUG oslo_vmware.api [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 914.428349] env[63345]: value = "task-1017444" [ 914.428349] env[63345]: _type = "Task" [ 914.428349] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.438701] env[63345]: DEBUG oslo_vmware.api [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017444, 'name': Rename_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.463515] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6240971c-7ff9-467b-8001-cd1fcdf386a9 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquiring lock "interface-0da64b45-fa00-4fe8-8d1d-df586f27743f-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 914.463996] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6240971c-7ff9-467b-8001-cd1fcdf386a9 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Lock "interface-0da64b45-fa00-4fe8-8d1d-df586f27743f-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 914.464613] env[63345]: DEBUG nova.objects.instance [None req-6240971c-7ff9-467b-8001-cd1fcdf386a9 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Lazy-loading 'flavor' on Instance uuid 0da64b45-fa00-4fe8-8d1d-df586f27743f {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 914.549189] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b10f5478-4b37-47ee-91ea-5d5fd0f186ad tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Lock "5e20b33c-1481-4bd3-b269-29a70cc3150d" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.375s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 914.648301] env[63345]: DEBUG nova.network.neutron [None 
req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: e5546a26-3f94-48a6-914a-2c37e63a0aeb] Successfully created port: 08cd427f-e8d7-43d7-9746-27262c5bd8d2 {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 914.660618] env[63345]: DEBUG oslo_vmware.api [None req-419e3c65-74ef-4f96-b1f3-04fd5006a001 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Task: {'id': task-1017441, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.560425} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.660883] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-419e3c65-74ef-4f96-b1f3-04fd5006a001 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 914.661154] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-419e3c65-74ef-4f96-b1f3-04fd5006a001 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: bc9d2e6a-f77a-4a21-90bc-81949cbfce91] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 914.661976] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-419e3c65-74ef-4f96-b1f3-04fd5006a001 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: bc9d2e6a-f77a-4a21-90bc-81949cbfce91] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 914.661976] env[63345]: INFO nova.compute.manager [None req-419e3c65-74ef-4f96-b1f3-04fd5006a001 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] [instance: bc9d2e6a-f77a-4a21-90bc-81949cbfce91] Took 1.69 seconds to destroy the instance on the hypervisor. [ 914.661976] env[63345]: DEBUG oslo.service.loopingcall [None req-419e3c65-74ef-4f96-b1f3-04fd5006a001 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 914.661976] env[63345]: DEBUG nova.compute.manager [-] [instance: bc9d2e6a-f77a-4a21-90bc-81949cbfce91] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 914.662253] env[63345]: DEBUG nova.network.neutron [-] [instance: bc9d2e6a-f77a-4a21-90bc-81949cbfce91] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 914.944744] env[63345]: DEBUG oslo_vmware.api [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017444, 'name': Rename_Task, 'duration_secs': 0.204437} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.950254] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 914.950254] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f46792c4-9f21-4cd7-a082-aefe0be211e9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.956484] env[63345]: DEBUG oslo_vmware.api [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 914.956484] env[63345]: value = "task-1017445" [ 914.956484] env[63345]: _type = "Task" [ 914.956484] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.967299] env[63345]: DEBUG oslo_vmware.api [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017445, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.968969] env[63345]: DEBUG nova.objects.instance [None req-6240971c-7ff9-467b-8001-cd1fcdf386a9 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Lazy-loading 'pci_requests' on Instance uuid 0da64b45-fa00-4fe8-8d1d-df586f27743f {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 915.187889] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bcfd8ca-85a8-4e00-865a-2a8bea9a0c33 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.197254] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9e52ec8-fb8a-49cd-9439-5d1373f4b78e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.227964] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e542fea-8d67-4ade-85e9-563aa5b72b94 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.236516] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ca66986-5635-469a-884d-2175e2c36341 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.252253] env[63345]: DEBUG nova.compute.provider_tree [None req-fd2974a3-ed20-44e6-a8e4-d684154888e5 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 915.293427] env[63345]: DEBUG nova.compute.manager [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 
tempest-DeleteServersTestJSON-208475450-project-member] [instance: e5546a26-3f94-48a6-914a-2c37e63a0aeb] Start spawning the instance on the hypervisor. {{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 915.337920] env[63345]: DEBUG nova.virt.hardware [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 915.338213] env[63345]: DEBUG nova.virt.hardware [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 915.338383] env[63345]: DEBUG nova.virt.hardware [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 915.338636] env[63345]: DEBUG nova.virt.hardware [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 915.338727] env[63345]: DEBUG nova.virt.hardware [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 915.338889] env[63345]: DEBUG nova.virt.hardware [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 915.339116] env[63345]: DEBUG nova.virt.hardware [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 915.339302] env[63345]: DEBUG nova.virt.hardware [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 915.339612] env[63345]: DEBUG nova.virt.hardware [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 915.339683] env[63345]: DEBUG nova.virt.hardware [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 915.339861] env[63345]: DEBUG nova.virt.hardware [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 915.340717] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf966b93-a891-452d-b711-d99f0ac2cefa {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.349244] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b8a837e-599b-4afd-9cf9-1932e40aed61 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.468762] env[63345]: DEBUG oslo_vmware.api [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017445, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.471473] env[63345]: DEBUG nova.objects.base [None req-6240971c-7ff9-467b-8001-cd1fcdf386a9 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Object Instance<0da64b45-fa00-4fe8-8d1d-df586f27743f> lazy-loaded attributes: flavor,pci_requests {{(pid=63345) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 915.471742] env[63345]: DEBUG nova.network.neutron [None req-6240971c-7ff9-467b-8001-cd1fcdf386a9 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 915.619103] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6240971c-7ff9-467b-8001-cd1fcdf386a9 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Lock "interface-0da64b45-fa00-4fe8-8d1d-df586f27743f-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.155s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 915.755736] env[63345]: DEBUG nova.scheduler.client.report [None req-fd2974a3-ed20-44e6-a8e4-d684154888e5 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 915.974225] env[63345]: DEBUG oslo_vmware.api [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017445, 'name': PowerOnVM_Task, 'duration_secs': 0.876702} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.975026] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 915.975423] env[63345]: INFO nova.compute.manager [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Took 8.67 seconds to spawn the instance on the hypervisor. 
[ 915.975722] env[63345]: DEBUG nova.compute.manager [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 915.977466] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b0ac5f1-5760-430f-9e0d-8407279cb2ee {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.029708] env[63345]: DEBUG nova.network.neutron [-] [instance: bc9d2e6a-f77a-4a21-90bc-81949cbfce91] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 916.261928] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fd2974a3-ed20-44e6-a8e4-d684154888e5 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.981s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 916.264307] env[63345]: DEBUG oslo_concurrency.lockutils [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.674s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 916.265795] env[63345]: INFO nova.compute.claims [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 916.326215] env[63345]: INFO nova.scheduler.client.report [None req-fd2974a3-ed20-44e6-a8e4-d684154888e5 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Deleted allocations for instance 75fc8365-bf8d-489e-935f-a5169c6a7e62 [ 916.497952] env[63345]: INFO nova.compute.manager [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Took 32.85 seconds to build instance. [ 916.535615] env[63345]: INFO nova.compute.manager [-] [instance: bc9d2e6a-f77a-4a21-90bc-81949cbfce91] Took 1.87 seconds to deallocate network for instance. 
[ 916.570086] env[63345]: DEBUG nova.network.neutron [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: e5546a26-3f94-48a6-914a-2c37e63a0aeb] Successfully updated port: 08cd427f-e8d7-43d7-9746-27262c5bd8d2 {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 916.657777] env[63345]: DEBUG nova.compute.manager [req-a036982b-dcb9-4ece-8925-26f2002bdd1c req-c2239527-6e0a-40ca-8f0c-fa2200edea6f service nova] [instance: 017a06b3-cc1a-4822-a07f-ca881fd4254b] Received event network-changed-d861c19b-10d8-47c4-90d2-a823d9faa164 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 916.657956] env[63345]: DEBUG nova.compute.manager [req-a036982b-dcb9-4ece-8925-26f2002bdd1c req-c2239527-6e0a-40ca-8f0c-fa2200edea6f service nova] [instance: 017a06b3-cc1a-4822-a07f-ca881fd4254b] Refreshing instance network info cache due to event network-changed-d861c19b-10d8-47c4-90d2-a823d9faa164. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 916.658188] env[63345]: DEBUG oslo_concurrency.lockutils [req-a036982b-dcb9-4ece-8925-26f2002bdd1c req-c2239527-6e0a-40ca-8f0c-fa2200edea6f service nova] Acquiring lock "refresh_cache-017a06b3-cc1a-4822-a07f-ca881fd4254b" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 916.658379] env[63345]: DEBUG oslo_concurrency.lockutils [req-a036982b-dcb9-4ece-8925-26f2002bdd1c req-c2239527-6e0a-40ca-8f0c-fa2200edea6f service nova] Acquired lock "refresh_cache-017a06b3-cc1a-4822-a07f-ca881fd4254b" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 916.658570] env[63345]: DEBUG nova.network.neutron [req-a036982b-dcb9-4ece-8925-26f2002bdd1c req-c2239527-6e0a-40ca-8f0c-fa2200edea6f service nova] [instance: 017a06b3-cc1a-4822-a07f-ca881fd4254b] Refreshing network info cache for port d861c19b-10d8-47c4-90d2-a823d9faa164 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 916.673463] env[63345]: DEBUG nova.compute.manager [req-e4aa7211-3e32-4193-ad97-0163ec98e561 req-8bd4a40a-83cf-45e5-8ded-196fd0f59d6d service nova] [instance: bc9d2e6a-f77a-4a21-90bc-81949cbfce91] Received event network-vif-deleted-35f00929-4dc1-4515-b0de-19a6377c68ca {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 916.835671] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fd2974a3-ed20-44e6-a8e4-d684154888e5 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Lock "75fc8365-bf8d-489e-935f-a5169c6a7e62" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.399s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 917.000373] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4b90f49f-15ed-482a-b5f0-64d15f6afd80 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lock "4868a0a0-ca35-44b0-a90c-124aa366af76" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.359s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 917.041719] env[63345]: DEBUG oslo_concurrency.lockutils [None req-419e3c65-74ef-4f96-b1f3-04fd5006a001 
tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 917.072891] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquiring lock "refresh_cache-e5546a26-3f94-48a6-914a-2c37e63a0aeb" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 917.073057] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquired lock "refresh_cache-e5546a26-3f94-48a6-914a-2c37e63a0aeb" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 917.073374] env[63345]: DEBUG nova.network.neutron [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: e5546a26-3f94-48a6-914a-2c37e63a0aeb] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 917.605326] env[63345]: DEBUG nova.network.neutron [req-a036982b-dcb9-4ece-8925-26f2002bdd1c req-c2239527-6e0a-40ca-8f0c-fa2200edea6f service nova] [instance: 017a06b3-cc1a-4822-a07f-ca881fd4254b] Updated VIF entry in instance network info cache for port d861c19b-10d8-47c4-90d2-a823d9faa164. {{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 917.605708] env[63345]: DEBUG nova.network.neutron [req-a036982b-dcb9-4ece-8925-26f2002bdd1c req-c2239527-6e0a-40ca-8f0c-fa2200edea6f service nova] [instance: 017a06b3-cc1a-4822-a07f-ca881fd4254b] Updating instance_info_cache with network_info: [{"id": "d861c19b-10d8-47c4-90d2-a823d9faa164", "address": "fa:16:3e:c9:2c:68", "network": {"id": "5a619899-0632-4b47-a853-63998d2913e7", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1507203599-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e9c7157843047ac8203d4fc5261572a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3739ba33-c119-432c-9aee-80a62864317d", "external-id": "nsx-vlan-transportzone-474", "segmentation_id": 474, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd861c19b-10", "ovs_interfaceid": "d861c19b-10d8-47c4-90d2-a823d9faa164", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 917.624175] env[63345]: DEBUG nova.network.neutron [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] 
[instance: e5546a26-3f94-48a6-914a-2c37e63a0aeb] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 917.633818] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5891d7b-0c4b-4112-93b5-211db01eeb52 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.644149] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9517e20a-74b6-4c6c-9e7a-9e8b1e8ecaf5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.679739] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-668b04b7-a834-4117-982f-76db7535580f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.687736] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f866c2c-8011-4675-9897-644ddce31399 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.692819] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 917.704989] env[63345]: DEBUG nova.compute.provider_tree [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 917.786180] env[63345]: DEBUG nova.network.neutron [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: e5546a26-3f94-48a6-914a-2c37e63a0aeb] Updating instance_info_cache with network_info: [{"id": "08cd427f-e8d7-43d7-9746-27262c5bd8d2", "address": "fa:16:3e:65:f2:57", "network": {"id": "d7581fd9-99cb-4847-b9da-a659a40e1d52", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1100696493-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c535ae9067ab4e8a87e95c68af4624fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f77ff7a1-209c-4f3f-b2a0-fd817741e739", "external-id": "nsx-vlan-transportzone-935", "segmentation_id": 935, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap08cd427f-e8", "ovs_interfaceid": "08cd427f-e8d7-43d7-9746-27262c5bd8d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] 
{{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 918.111206] env[63345]: DEBUG oslo_concurrency.lockutils [req-a036982b-dcb9-4ece-8925-26f2002bdd1c req-c2239527-6e0a-40ca-8f0c-fa2200edea6f service nova] Releasing lock "refresh_cache-017a06b3-cc1a-4822-a07f-ca881fd4254b" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 918.208578] env[63345]: DEBUG nova.scheduler.client.report [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 918.287543] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Releasing lock "refresh_cache-e5546a26-3f94-48a6-914a-2c37e63a0aeb" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 918.287900] env[63345]: DEBUG nova.compute.manager [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: e5546a26-3f94-48a6-914a-2c37e63a0aeb] Instance network_info: |[{"id": "08cd427f-e8d7-43d7-9746-27262c5bd8d2", "address": "fa:16:3e:65:f2:57", "network": {"id": "d7581fd9-99cb-4847-b9da-a659a40e1d52", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1100696493-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c535ae9067ab4e8a87e95c68af4624fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f77ff7a1-209c-4f3f-b2a0-fd817741e739", "external-id": "nsx-vlan-transportzone-935", "segmentation_id": 935, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap08cd427f-e8", "ovs_interfaceid": "08cd427f-e8d7-43d7-9746-27262c5bd8d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 918.288381] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: e5546a26-3f94-48a6-914a-2c37e63a0aeb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:65:f2:57', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f77ff7a1-209c-4f3f-b2a0-fd817741e739', 'network-type': 
'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '08cd427f-e8d7-43d7-9746-27262c5bd8d2', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 918.296261] env[63345]: DEBUG oslo.service.loopingcall [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 918.296863] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e5546a26-3f94-48a6-914a-2c37e63a0aeb] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 918.297120] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-329366cc-1cbc-4d55-9a6d-e17b8bf51263 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.318349] env[63345]: DEBUG oslo_concurrency.lockutils [None req-404ba970-da5b-41c1-bed5-c785a50d42c0 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquiring lock "interface-0da64b45-fa00-4fe8-8d1d-df586f27743f-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 918.318555] env[63345]: DEBUG oslo_concurrency.lockutils [None req-404ba970-da5b-41c1-bed5-c785a50d42c0 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Lock "interface-0da64b45-fa00-4fe8-8d1d-df586f27743f-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 918.318899] env[63345]: DEBUG nova.objects.instance [None req-404ba970-da5b-41c1-bed5-c785a50d42c0 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Lazy-loading 'flavor' on Instance uuid 0da64b45-fa00-4fe8-8d1d-df586f27743f {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 918.320808] env[63345]: DEBUG oslo_concurrency.lockutils [None req-783e1b39-4458-4385-8bb3-3cd8ebb8adbb tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Acquiring lock "a85688b0-d68f-4370-bd95-dc9fb1d2c26a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 918.321050] env[63345]: DEBUG oslo_concurrency.lockutils [None req-783e1b39-4458-4385-8bb3-3cd8ebb8adbb tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Lock "a85688b0-d68f-4370-bd95-dc9fb1d2c26a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 918.321427] env[63345]: DEBUG oslo_concurrency.lockutils [None req-783e1b39-4458-4385-8bb3-3cd8ebb8adbb tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Acquiring lock "a85688b0-d68f-4370-bd95-dc9fb1d2c26a-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 918.321665] env[63345]: DEBUG oslo_concurrency.lockutils [None req-783e1b39-4458-4385-8bb3-3cd8ebb8adbb tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Lock "a85688b0-d68f-4370-bd95-dc9fb1d2c26a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 918.321858] env[63345]: DEBUG oslo_concurrency.lockutils [None req-783e1b39-4458-4385-8bb3-3cd8ebb8adbb tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Lock "a85688b0-d68f-4370-bd95-dc9fb1d2c26a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 918.324862] env[63345]: INFO nova.compute.manager [None req-783e1b39-4458-4385-8bb3-3cd8ebb8adbb tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: a85688b0-d68f-4370-bd95-dc9fb1d2c26a] Terminating instance [ 918.331151] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 918.331151] env[63345]: value = "task-1017446" [ 918.331151] env[63345]: _type = "Task" [ 918.331151] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.339645] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017446, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.416677] env[63345]: DEBUG oslo_concurrency.lockutils [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Acquiring lock "a0eb9dae-0d27-419f-9210-eaa445e564c8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 918.417012] env[63345]: DEBUG oslo_concurrency.lockutils [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Lock "a0eb9dae-0d27-419f-9210-eaa445e564c8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 918.715181] env[63345]: DEBUG oslo_concurrency.lockutils [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.450s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 918.715464] env[63345]: DEBUG nova.compute.manager [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Start building networks asynchronously for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 918.719162] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b903c3c1-305b-447b-8a72-5019cacea85b tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.227s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 918.719382] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b903c3c1-305b-447b-8a72-5019cacea85b tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 918.721537] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0a2e023c-b063-48d9-b52b-50a4616fb8b5 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.034s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 918.721822] env[63345]: DEBUG nova.objects.instance [None req-0a2e023c-b063-48d9-b52b-50a4616fb8b5 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Lazy-loading 'resources' on Instance uuid ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 918.767677] env[63345]: INFO nova.scheduler.client.report [None req-b903c3c1-305b-447b-8a72-5019cacea85b tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Deleted allocations for instance 11652422-9136-4453-b932-06695f9bc910 [ 918.830930] env[63345]: DEBUG nova.compute.manager [None req-783e1b39-4458-4385-8bb3-3cd8ebb8adbb tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: a85688b0-d68f-4370-bd95-dc9fb1d2c26a] Start destroying the instance on the hypervisor. {{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 918.831931] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-783e1b39-4458-4385-8bb3-3cd8ebb8adbb tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: a85688b0-d68f-4370-bd95-dc9fb1d2c26a] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 918.832644] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-731bdb7d-4fa3-4f6e-823f-3440f448b71a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.845110] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017446, 'name': CreateVM_Task, 'duration_secs': 0.342416} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.850594] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e5546a26-3f94-48a6-914a-2c37e63a0aeb] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 918.850594] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-783e1b39-4458-4385-8bb3-3cd8ebb8adbb tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: a85688b0-d68f-4370-bd95-dc9fb1d2c26a] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 918.850594] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 918.850594] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 918.850594] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 918.850594] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d3d37e93-54d1-406c-8fb9-69661608dd5e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.850594] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8f27cfa5-bf6a-4736-a097-8cc4a8b7caad {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.855580] env[63345]: DEBUG oslo_vmware.api [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for the task: (returnval){ [ 918.855580] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]5258e1ac-1850-90a9-fc0a-203149ce66ce" [ 918.855580] env[63345]: _type = "Task" [ 918.855580] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.858494] env[63345]: DEBUG oslo_vmware.api [None req-783e1b39-4458-4385-8bb3-3cd8ebb8adbb tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Waiting for the task: (returnval){ [ 918.858494] env[63345]: value = "task-1017447" [ 918.858494] env[63345]: _type = "Task" [ 918.858494] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.868504] env[63345]: DEBUG oslo_vmware.api [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5258e1ac-1850-90a9-fc0a-203149ce66ce, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.871660] env[63345]: DEBUG oslo_vmware.api [None req-783e1b39-4458-4385-8bb3-3cd8ebb8adbb tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017447, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.919776] env[63345]: DEBUG nova.compute.manager [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: a0eb9dae-0d27-419f-9210-eaa445e564c8] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 918.942078] env[63345]: DEBUG nova.objects.instance [None req-404ba970-da5b-41c1-bed5-c785a50d42c0 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Lazy-loading 'pci_requests' on Instance uuid 0da64b45-fa00-4fe8-8d1d-df586f27743f {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 919.081526] env[63345]: DEBUG nova.compute.manager [req-ef4bcee8-9225-4bfb-b165-49d33ffbfb66 req-2671e40f-f869-4555-b69f-81c7e4f08da8 service nova] [instance: e5546a26-3f94-48a6-914a-2c37e63a0aeb] Received event network-vif-plugged-08cd427f-e8d7-43d7-9746-27262c5bd8d2 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 919.081848] env[63345]: DEBUG oslo_concurrency.lockutils [req-ef4bcee8-9225-4bfb-b165-49d33ffbfb66 req-2671e40f-f869-4555-b69f-81c7e4f08da8 service nova] Acquiring lock "e5546a26-3f94-48a6-914a-2c37e63a0aeb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 919.082029] env[63345]: DEBUG oslo_concurrency.lockutils [req-ef4bcee8-9225-4bfb-b165-49d33ffbfb66 req-2671e40f-f869-4555-b69f-81c7e4f08da8 service nova] Lock "e5546a26-3f94-48a6-914a-2c37e63a0aeb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 919.082269] env[63345]: DEBUG oslo_concurrency.lockutils [req-ef4bcee8-9225-4bfb-b165-49d33ffbfb66 req-2671e40f-f869-4555-b69f-81c7e4f08da8 service nova] Lock "e5546a26-3f94-48a6-914a-2c37e63a0aeb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 919.082438] env[63345]: DEBUG nova.compute.manager [req-ef4bcee8-9225-4bfb-b165-49d33ffbfb66 req-2671e40f-f869-4555-b69f-81c7e4f08da8 service nova] [instance: e5546a26-3f94-48a6-914a-2c37e63a0aeb] No waiting events found dispatching network-vif-plugged-08cd427f-e8d7-43d7-9746-27262c5bd8d2 {{(pid=63345) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:322}} [ 919.082550] env[63345]: WARNING nova.compute.manager [req-ef4bcee8-9225-4bfb-b165-49d33ffbfb66 req-2671e40f-f869-4555-b69f-81c7e4f08da8 service nova] [instance: e5546a26-3f94-48a6-914a-2c37e63a0aeb] Received unexpected event network-vif-plugged-08cd427f-e8d7-43d7-9746-27262c5bd8d2 for instance with vm_state building and task_state spawning. [ 919.082748] env[63345]: DEBUG nova.compute.manager [req-ef4bcee8-9225-4bfb-b165-49d33ffbfb66 req-2671e40f-f869-4555-b69f-81c7e4f08da8 service nova] [instance: e5546a26-3f94-48a6-914a-2c37e63a0aeb] Received event network-changed-08cd427f-e8d7-43d7-9746-27262c5bd8d2 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 919.082930] env[63345]: DEBUG nova.compute.manager [req-ef4bcee8-9225-4bfb-b165-49d33ffbfb66 req-2671e40f-f869-4555-b69f-81c7e4f08da8 service nova] [instance: e5546a26-3f94-48a6-914a-2c37e63a0aeb] Refreshing instance network info cache due to event network-changed-08cd427f-e8d7-43d7-9746-27262c5bd8d2. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 919.083199] env[63345]: DEBUG oslo_concurrency.lockutils [req-ef4bcee8-9225-4bfb-b165-49d33ffbfb66 req-2671e40f-f869-4555-b69f-81c7e4f08da8 service nova] Acquiring lock "refresh_cache-e5546a26-3f94-48a6-914a-2c37e63a0aeb" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 919.083358] env[63345]: DEBUG oslo_concurrency.lockutils [req-ef4bcee8-9225-4bfb-b165-49d33ffbfb66 req-2671e40f-f869-4555-b69f-81c7e4f08da8 service nova] Acquired lock "refresh_cache-e5546a26-3f94-48a6-914a-2c37e63a0aeb" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 919.083524] env[63345]: DEBUG nova.network.neutron [req-ef4bcee8-9225-4bfb-b165-49d33ffbfb66 req-2671e40f-f869-4555-b69f-81c7e4f08da8 service nova] [instance: e5546a26-3f94-48a6-914a-2c37e63a0aeb] Refreshing network info cache for port 08cd427f-e8d7-43d7-9746-27262c5bd8d2 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 919.229020] env[63345]: DEBUG nova.compute.utils [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 919.230336] env[63345]: DEBUG nova.compute.manager [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Allocating IP information in the background. 
{{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 919.230506] env[63345]: DEBUG nova.network.neutron [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 919.269397] env[63345]: DEBUG nova.policy [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '27fc4f99c7f44b1ea421bd8f13de6e43', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '964cee117b3c4601b3afe82a8bb9c23e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 919.275721] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b903c3c1-305b-447b-8a72-5019cacea85b tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Lock "11652422-9136-4453-b932-06695f9bc910" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.449s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 919.374091] env[63345]: DEBUG oslo_vmware.api [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5258e1ac-1850-90a9-fc0a-203149ce66ce, 'name': SearchDatastore_Task, 'duration_secs': 0.014768} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.374366] env[63345]: DEBUG oslo_vmware.api [None req-783e1b39-4458-4385-8bb3-3cd8ebb8adbb tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017447, 'name': PowerOffVM_Task, 'duration_secs': 0.26261} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.377781] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 919.378036] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: e5546a26-3f94-48a6-914a-2c37e63a0aeb] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 919.378278] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 919.378430] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 919.378615] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 919.378892] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-783e1b39-4458-4385-8bb3-3cd8ebb8adbb tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: a85688b0-d68f-4370-bd95-dc9fb1d2c26a] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 919.379067] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-783e1b39-4458-4385-8bb3-3cd8ebb8adbb tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: a85688b0-d68f-4370-bd95-dc9fb1d2c26a] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 919.379752] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-af53dedf-58d3-4bf9-92d9-127e7e88ada7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.381609] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-58b77ffc-925d-462c-be2c-1d3580c07679 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.390118] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] 
Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 919.390315] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 919.391084] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-41c0a6ff-c933-4ec9-bb10-f469f9c61c60 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.398486] env[63345]: DEBUG oslo_vmware.api [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for the task: (returnval){ [ 919.398486] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52d42c52-2647-b49c-0ce0-99fc14e04a02" [ 919.398486] env[63345]: _type = "Task" [ 919.398486] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.407083] env[63345]: DEBUG oslo_vmware.api [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52d42c52-2647-b49c-0ce0-99fc14e04a02, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.439103] env[63345]: DEBUG oslo_concurrency.lockutils [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 919.445567] env[63345]: DEBUG nova.objects.base [None req-404ba970-da5b-41c1-bed5-c785a50d42c0 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Object Instance<0da64b45-fa00-4fe8-8d1d-df586f27743f> lazy-loaded attributes: flavor,pci_requests {{(pid=63345) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 919.445910] env[63345]: DEBUG nova.network.neutron [None req-404ba970-da5b-41c1-bed5-c785a50d42c0 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 919.461109] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-783e1b39-4458-4385-8bb3-3cd8ebb8adbb tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: a85688b0-d68f-4370-bd95-dc9fb1d2c26a] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 919.461314] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-783e1b39-4458-4385-8bb3-3cd8ebb8adbb tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: a85688b0-d68f-4370-bd95-dc9fb1d2c26a] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 919.461502] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-783e1b39-4458-4385-8bb3-3cd8ebb8adbb tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Deleting the datastore file [datastore2] a85688b0-d68f-4370-bd95-dc9fb1d2c26a {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 919.461795] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1da83e94-ca90-49c3-8498-0d9e80fe2282 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.468890] env[63345]: DEBUG oslo_vmware.api [None req-783e1b39-4458-4385-8bb3-3cd8ebb8adbb tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Waiting for the task: (returnval){ [ 919.468890] env[63345]: value = "task-1017449" [ 919.468890] env[63345]: _type = "Task" [ 919.468890] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.477757] env[63345]: DEBUG oslo_vmware.api [None req-783e1b39-4458-4385-8bb3-3cd8ebb8adbb tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017449, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.485808] env[63345]: DEBUG nova.policy [None req-404ba970-da5b-41c1-bed5-c785a50d42c0 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e36fd04030444217acadbbf4e4fe9be0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '33c28bfca4da460e8ca96dc7519204c8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 919.528806] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba45c6ba-789d-4325-a79c-28e0674baaa0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.536141] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b10fe161-8984-47c2-82ce-b5062fa91c20 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.316235] env[63345]: DEBUG nova.compute.manager [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Start building block device mappings for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 920.319929] env[63345]: DEBUG nova.network.neutron [None req-404ba970-da5b-41c1-bed5-c785a50d42c0 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Successfully created port: 2b931f56-815d-48ec-915d-c68e2ae0333f {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 920.322083] env[63345]: DEBUG nova.network.neutron [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Successfully created port: 0cc6f455-5ad2-4802-a0ff-42268fe50023 {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 920.332493] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35741376-e981-4b6f-984b-7d8b1c992722 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.348213] env[63345]: DEBUG oslo_vmware.api [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52d42c52-2647-b49c-0ce0-99fc14e04a02, 'name': SearchDatastore_Task, 'duration_secs': 0.010328} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.348689] env[63345]: DEBUG oslo_vmware.api [None req-783e1b39-4458-4385-8bb3-3cd8ebb8adbb tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017449, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.140473} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.350273] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a52718db-440d-4b9e-bd40-9bd5e2aa51bc {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.357129] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-783e1b39-4458-4385-8bb3-3cd8ebb8adbb tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 920.357129] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-783e1b39-4458-4385-8bb3-3cd8ebb8adbb tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: a85688b0-d68f-4370-bd95-dc9fb1d2c26a] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 920.357129] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-783e1b39-4458-4385-8bb3-3cd8ebb8adbb tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: a85688b0-d68f-4370-bd95-dc9fb1d2c26a] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 920.357129] env[63345]: INFO nova.compute.manager [None req-783e1b39-4458-4385-8bb3-3cd8ebb8adbb tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: a85688b0-d68f-4370-bd95-dc9fb1d2c26a] Took 1.52 seconds to destroy the instance on the hypervisor. [ 920.357129] env[63345]: DEBUG oslo.service.loopingcall [None req-783e1b39-4458-4385-8bb3-3cd8ebb8adbb tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 920.357129] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8a475e93-9e53-40c6-b1fa-a613c45b8fe0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.358445] env[63345]: DEBUG nova.compute.manager [-] [instance: a85688b0-d68f-4370-bd95-dc9fb1d2c26a] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 920.358550] env[63345]: DEBUG nova.network.neutron [-] [instance: a85688b0-d68f-4370-bd95-dc9fb1d2c26a] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 920.364865] env[63345]: DEBUG oslo_vmware.api [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for the task: (returnval){ [ 920.364865] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]5233d270-3e46-7763-5854-9906a6206e69" [ 920.364865] env[63345]: _type = "Task" [ 920.364865] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.374132] env[63345]: DEBUG nova.compute.provider_tree [None req-0a2e023c-b063-48d9-b52b-50a4616fb8b5 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 920.385846] env[63345]: DEBUG oslo_vmware.api [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5233d270-3e46-7763-5854-9906a6206e69, 'name': SearchDatastore_Task, 'duration_secs': 0.009092} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.386141] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 920.386372] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] e5546a26-3f94-48a6-914a-2c37e63a0aeb/e5546a26-3f94-48a6-914a-2c37e63a0aeb.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 920.386625] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-76f24620-222f-4276-83e7-1c33d3646dbb {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.393964] env[63345]: DEBUG oslo_vmware.api [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for the task: (returnval){ [ 920.393964] env[63345]: value = "task-1017450" [ 920.393964] env[63345]: _type = "Task" [ 920.393964] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.402392] env[63345]: DEBUG oslo_vmware.api [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017450, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.713865] env[63345]: DEBUG nova.network.neutron [req-ef4bcee8-9225-4bfb-b165-49d33ffbfb66 req-2671e40f-f869-4555-b69f-81c7e4f08da8 service nova] [instance: e5546a26-3f94-48a6-914a-2c37e63a0aeb] Updated VIF entry in instance network info cache for port 08cd427f-e8d7-43d7-9746-27262c5bd8d2. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 920.714648] env[63345]: DEBUG nova.network.neutron [req-ef4bcee8-9225-4bfb-b165-49d33ffbfb66 req-2671e40f-f869-4555-b69f-81c7e4f08da8 service nova] [instance: e5546a26-3f94-48a6-914a-2c37e63a0aeb] Updating instance_info_cache with network_info: [{"id": "08cd427f-e8d7-43d7-9746-27262c5bd8d2", "address": "fa:16:3e:65:f2:57", "network": {"id": "d7581fd9-99cb-4847-b9da-a659a40e1d52", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1100696493-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c535ae9067ab4e8a87e95c68af4624fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f77ff7a1-209c-4f3f-b2a0-fd817741e739", "external-id": "nsx-vlan-transportzone-935", "segmentation_id": 935, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap08cd427f-e8", "ovs_interfaceid": "08cd427f-e8d7-43d7-9746-27262c5bd8d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 920.840510] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8ded89f7-36e8-4b1f-bb8f-eb5b0d30d69a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Acquiring lock "017a06b3-cc1a-4822-a07f-ca881fd4254b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 920.840921] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8ded89f7-36e8-4b1f-bb8f-eb5b0d30d69a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Lock "017a06b3-cc1a-4822-a07f-ca881fd4254b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 920.841283] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8ded89f7-36e8-4b1f-bb8f-eb5b0d30d69a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Acquiring lock "017a06b3-cc1a-4822-a07f-ca881fd4254b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 920.841550] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8ded89f7-36e8-4b1f-bb8f-eb5b0d30d69a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Lock "017a06b3-cc1a-4822-a07f-ca881fd4254b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 920.841775] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8ded89f7-36e8-4b1f-bb8f-eb5b0d30d69a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Lock "017a06b3-cc1a-4822-a07f-ca881fd4254b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 920.843850] env[63345]: INFO nova.compute.manager [None req-8ded89f7-36e8-4b1f-bb8f-eb5b0d30d69a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] [instance: 017a06b3-cc1a-4822-a07f-ca881fd4254b] Terminating instance [ 920.879914] env[63345]: DEBUG nova.scheduler.client.report [None req-0a2e023c-b063-48d9-b52b-50a4616fb8b5 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 920.908587] env[63345]: DEBUG oslo_vmware.api [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017450, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.499097} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.908587] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] e5546a26-3f94-48a6-914a-2c37e63a0aeb/e5546a26-3f94-48a6-914a-2c37e63a0aeb.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 920.908587] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: e5546a26-3f94-48a6-914a-2c37e63a0aeb] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 920.908587] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7e78d0c5-10b7-422a-916a-f4ad58202584 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.914321] env[63345]: DEBUG oslo_vmware.api [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for the task: (returnval){ [ 920.914321] env[63345]: value = "task-1017451" [ 920.914321] env[63345]: _type = "Task" [ 920.914321] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.921950] env[63345]: DEBUG oslo_vmware.api [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017451, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.218158] env[63345]: DEBUG oslo_concurrency.lockutils [req-ef4bcee8-9225-4bfb-b165-49d33ffbfb66 req-2671e40f-f869-4555-b69f-81c7e4f08da8 service nova] Releasing lock "refresh_cache-e5546a26-3f94-48a6-914a-2c37e63a0aeb" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 921.218468] env[63345]: DEBUG nova.compute.manager [req-ef4bcee8-9225-4bfb-b165-49d33ffbfb66 req-2671e40f-f869-4555-b69f-81c7e4f08da8 service nova] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Received event network-changed-277d5619-4a4c-4f02-9ce7-786f57c7dc46 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 921.218647] env[63345]: DEBUG nova.compute.manager [req-ef4bcee8-9225-4bfb-b165-49d33ffbfb66 req-2671e40f-f869-4555-b69f-81c7e4f08da8 service nova] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Refreshing instance network info cache due to event network-changed-277d5619-4a4c-4f02-9ce7-786f57c7dc46. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 921.218899] env[63345]: DEBUG oslo_concurrency.lockutils [req-ef4bcee8-9225-4bfb-b165-49d33ffbfb66 req-2671e40f-f869-4555-b69f-81c7e4f08da8 service nova] Acquiring lock "refresh_cache-4868a0a0-ca35-44b0-a90c-124aa366af76" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 921.219065] env[63345]: DEBUG oslo_concurrency.lockutils [req-ef4bcee8-9225-4bfb-b165-49d33ffbfb66 req-2671e40f-f869-4555-b69f-81c7e4f08da8 service nova] Acquired lock "refresh_cache-4868a0a0-ca35-44b0-a90c-124aa366af76" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 921.219237] env[63345]: DEBUG nova.network.neutron [req-ef4bcee8-9225-4bfb-b165-49d33ffbfb66 req-2671e40f-f869-4555-b69f-81c7e4f08da8 service nova] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Refreshing network info cache for port 277d5619-4a4c-4f02-9ce7-786f57c7dc46 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 921.345882] env[63345]: DEBUG nova.compute.manager [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Start spawning the instance on the hypervisor. {{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 921.348484] env[63345]: DEBUG nova.compute.manager [None req-8ded89f7-36e8-4b1f-bb8f-eb5b0d30d69a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] [instance: 017a06b3-cc1a-4822-a07f-ca881fd4254b] Start destroying the instance on the hypervisor. 
{{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 921.348691] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-8ded89f7-36e8-4b1f-bb8f-eb5b0d30d69a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] [instance: 017a06b3-cc1a-4822-a07f-ca881fd4254b] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 921.349612] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de42765f-1f24-4d56-b6ce-9b4c59490a40 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.358977] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ded89f7-36e8-4b1f-bb8f-eb5b0d30d69a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] [instance: 017a06b3-cc1a-4822-a07f-ca881fd4254b] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 921.359147] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b62ba65d-bdc3-4982-99e3-d79a43f9c036 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.367253] env[63345]: DEBUG oslo_vmware.api [None req-8ded89f7-36e8-4b1f-bb8f-eb5b0d30d69a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Waiting for the task: (returnval){ [ 921.367253] env[63345]: value = "task-1017452" [ 921.367253] env[63345]: _type = "Task" [ 921.367253] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.370792] env[63345]: DEBUG nova.network.neutron [-] [instance: a85688b0-d68f-4370-bd95-dc9fb1d2c26a] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 921.377922] env[63345]: DEBUG oslo_vmware.api [None req-8ded89f7-36e8-4b1f-bb8f-eb5b0d30d69a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Task: {'id': task-1017452, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.384934] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0a2e023c-b063-48d9-b52b-50a4616fb8b5 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.663s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 921.387469] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 21.906s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 921.393899] env[63345]: DEBUG nova.virt.hardware [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 921.395322] env[63345]: DEBUG nova.virt.hardware [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 921.395322] env[63345]: DEBUG nova.virt.hardware [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 921.395322] env[63345]: DEBUG nova.virt.hardware [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 921.395322] env[63345]: DEBUG nova.virt.hardware [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 921.395322] env[63345]: DEBUG nova.virt.hardware [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 921.395322] env[63345]: DEBUG nova.virt.hardware [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 921.395322] env[63345]: DEBUG nova.virt.hardware [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 921.395594] env[63345]: DEBUG nova.virt.hardware [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 921.395632] env[63345]: DEBUG nova.virt.hardware [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 921.396051] env[63345]: DEBUG nova.virt.hardware [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 921.396730] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aeb183b7-2028-49f3-985e-211cac89dd24 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.406138] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0953a40d-17d4-4244-91af-2334c09ea2af {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.431970] env[63345]: DEBUG oslo_vmware.api [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017451, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063446} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.432650] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: e5546a26-3f94-48a6-914a-2c37e63a0aeb] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 921.433122] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b98489c-e4cd-4243-af3e-cfdcf54e363d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.457390] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: e5546a26-3f94-48a6-914a-2c37e63a0aeb] Reconfiguring VM instance instance-00000057 to attach disk [datastore2] e5546a26-3f94-48a6-914a-2c37e63a0aeb/e5546a26-3f94-48a6-914a-2c37e63a0aeb.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 921.458701] env[63345]: INFO nova.scheduler.client.report [None req-0a2e023c-b063-48d9-b52b-50a4616fb8b5 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Deleted allocations for instance ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6 [ 921.459945] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6a120225-9146-4c69-b98f-d1bfc35dcb4d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.484161] env[63345]: DEBUG oslo_vmware.api [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for the task: (returnval){ [ 921.484161] env[63345]: value = "task-1017453" [ 921.484161] env[63345]: _type = "Task" [ 921.484161] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.492503] env[63345]: DEBUG oslo_vmware.api [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017453, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.735112] env[63345]: DEBUG nova.compute.manager [req-56cb946a-1bf2-44bc-baca-a6677a868ed1 req-88006d8d-37db-49b5-ab57-d4a36a61af26 service nova] [instance: a85688b0-d68f-4370-bd95-dc9fb1d2c26a] Received event network-vif-deleted-e0d9c52f-00fe-4c7a-9301-348d5c2c56cf {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 921.878018] env[63345]: INFO nova.compute.manager [-] [instance: a85688b0-d68f-4370-bd95-dc9fb1d2c26a] Took 1.52 seconds to deallocate network for instance. [ 921.878385] env[63345]: DEBUG oslo_vmware.api [None req-8ded89f7-36e8-4b1f-bb8f-eb5b0d30d69a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Task: {'id': task-1017452, 'name': PowerOffVM_Task, 'duration_secs': 0.317777} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.882282] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ded89f7-36e8-4b1f-bb8f-eb5b0d30d69a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] [instance: 017a06b3-cc1a-4822-a07f-ca881fd4254b] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 921.882493] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-8ded89f7-36e8-4b1f-bb8f-eb5b0d30d69a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] [instance: 017a06b3-cc1a-4822-a07f-ca881fd4254b] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 921.885257] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8645dc77-12a7-4001-8de9-44c23d10fade {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.969134] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-8ded89f7-36e8-4b1f-bb8f-eb5b0d30d69a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] [instance: 017a06b3-cc1a-4822-a07f-ca881fd4254b] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 921.969134] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-8ded89f7-36e8-4b1f-bb8f-eb5b0d30d69a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] [instance: 017a06b3-cc1a-4822-a07f-ca881fd4254b] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 921.969134] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ded89f7-36e8-4b1f-bb8f-eb5b0d30d69a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Deleting the datastore file [datastore2] 017a06b3-cc1a-4822-a07f-ca881fd4254b {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 921.969134] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-619d3400-5f90-4c81-9ba8-cc218ec97030 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.977873] env[63345]: DEBUG oslo_vmware.api [None req-8ded89f7-36e8-4b1f-bb8f-eb5b0d30d69a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Waiting for the task: (returnval){ [ 921.977873] env[63345]: value = "task-1017455" [ 921.977873] env[63345]: _type = "Task" [ 921.977873] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.982024] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0a2e023c-b063-48d9-b52b-50a4616fb8b5 tempest-ServerShowV254Test-214130529 tempest-ServerShowV254Test-214130529-project-member] Lock "ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.620s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 921.995686] env[63345]: DEBUG oslo_vmware.api [None req-8ded89f7-36e8-4b1f-bb8f-eb5b0d30d69a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Task: {'id': task-1017455, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.000284] env[63345]: DEBUG oslo_vmware.api [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017453, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.082267] env[63345]: DEBUG nova.network.neutron [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Successfully updated port: 0cc6f455-5ad2-4802-a0ff-42268fe50023 {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 922.159626] env[63345]: DEBUG nova.compute.manager [req-42d8536b-47ec-43d8-8469-6082bf03f9ff req-f7639a0e-f40a-4775-952f-7a6f8e6c07a8 service nova] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Received event network-vif-plugged-0cc6f455-5ad2-4802-a0ff-42268fe50023 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 922.159843] env[63345]: DEBUG oslo_concurrency.lockutils [req-42d8536b-47ec-43d8-8469-6082bf03f9ff req-f7639a0e-f40a-4775-952f-7a6f8e6c07a8 service nova] Acquiring lock "0fe61754-458c-4c5c-bb2d-2677302e5fb9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 922.160068] env[63345]: DEBUG oslo_concurrency.lockutils [req-42d8536b-47ec-43d8-8469-6082bf03f9ff req-f7639a0e-f40a-4775-952f-7a6f8e6c07a8 service nova] Lock "0fe61754-458c-4c5c-bb2d-2677302e5fb9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 922.160255] env[63345]: DEBUG oslo_concurrency.lockutils [req-42d8536b-47ec-43d8-8469-6082bf03f9ff req-f7639a0e-f40a-4775-952f-7a6f8e6c07a8 service nova] Lock "0fe61754-458c-4c5c-bb2d-2677302e5fb9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 922.160429] env[63345]: DEBUG nova.compute.manager [req-42d8536b-47ec-43d8-8469-6082bf03f9ff req-f7639a0e-f40a-4775-952f-7a6f8e6c07a8 service nova] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] No waiting events found dispatching 
network-vif-plugged-0cc6f455-5ad2-4802-a0ff-42268fe50023 {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 922.160602] env[63345]: WARNING nova.compute.manager [req-42d8536b-47ec-43d8-8469-6082bf03f9ff req-f7639a0e-f40a-4775-952f-7a6f8e6c07a8 service nova] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Received unexpected event network-vif-plugged-0cc6f455-5ad2-4802-a0ff-42268fe50023 for instance with vm_state building and task_state spawning. [ 922.162119] env[63345]: DEBUG nova.network.neutron [req-ef4bcee8-9225-4bfb-b165-49d33ffbfb66 req-2671e40f-f869-4555-b69f-81c7e4f08da8 service nova] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Updated VIF entry in instance network info cache for port 277d5619-4a4c-4f02-9ce7-786f57c7dc46. {{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 922.162514] env[63345]: DEBUG nova.network.neutron [req-ef4bcee8-9225-4bfb-b165-49d33ffbfb66 req-2671e40f-f869-4555-b69f-81c7e4f08da8 service nova] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Updating instance_info_cache with network_info: [{"id": "277d5619-4a4c-4f02-9ce7-786f57c7dc46", "address": "fa:16:3e:78:ff:28", "network": {"id": "18b67684-3f06-4f15-be40-ba0b2769b248", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1680877425-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.156", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cb91ecf5d00e48dea9baf2122ac4fed7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "68add7d6-c025-46fa-84d3-9c589adb63e4", "external-id": "nsx-vlan-transportzone-961", "segmentation_id": 961, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap277d5619-4a", "ovs_interfaceid": "277d5619-4a4c-4f02-9ce7-786f57c7dc46", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 922.389820] env[63345]: DEBUG oslo_concurrency.lockutils [None req-783e1b39-4458-4385-8bb3-3cd8ebb8adbb tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 922.437627] env[63345]: WARNING nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance bc9d2e6a-f77a-4a21-90bc-81949cbfce91 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
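Editor's note — the records just above trace Nova's external-event plumbing: when Neutron reports network-vif-plugged, the compute manager briefly takes the per-instance "<uuid>-events" lock, tries to pop a matching waiter via pop_instance_event, and logs "No waiting events found dispatching ..." plus a WARNING about an unexpected event when nothing was registered (the instance is still building, so no code path was blocked on it yet). The sketch below is a minimal, hypothetical illustration of that register/pop pattern using only the standard library; every name in it (InstanceEvents, prepare_for_event, pop_event, handle_external_event) is invented for the example and is not Nova's API.

```python
# Minimal, hypothetical sketch of the "register a waiter, pop it when the
# event arrives" pattern suggested by the pop_instance_event log lines above.
# All names are invented for illustration; this is not Nova code.
import threading
from collections import defaultdict


class InstanceEvents:
    def __init__(self):
        self._lock = threading.Lock()        # plays the role of the "<uuid>-events" lock
        self._waiters = defaultdict(dict)    # instance_uuid -> {event_name: threading.Event}

    def prepare_for_event(self, instance_uuid, event_name):
        """Called by the code path that will later block on the event."""
        ev = threading.Event()
        with self._lock:
            self._waiters[instance_uuid][event_name] = ev
        return ev

    def pop_event(self, instance_uuid, event_name):
        """Called when the external event actually arrives."""
        with self._lock:
            return self._waiters[instance_uuid].pop(event_name, None)


def handle_external_event(events, instance_uuid, event_name):
    waiter = events.pop_event(instance_uuid, event_name)
    if waiter is None:
        # Mirrors: "No waiting events found dispatching ..." /
        # "Received unexpected event ... for instance with vm_state building"
        print(f"unexpected event {event_name} for {instance_uuid}")
    else:
        waiter.set()   # unblock whoever called prepare_for_event()


if __name__ == "__main__":
    events = InstanceEvents()
    # Nothing was registered, so this exercises the "unexpected event" path.
    handle_external_event(events, "0fe61754", "network-vif-plugged")
```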
[ 922.437838] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 869f8110-6490-4a47-955a-0ce085f826af actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 922.437996] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 3a85df04-3997-48a3-8992-f24fe997b3cc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 922.438161] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance b5173471-3367-42ba-b450-62ad8573f048 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 922.438305] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance a85688b0-d68f-4370-bd95-dc9fb1d2c26a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 922.438452] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance dde93fd5-6312-4d91-b041-b7fc84b207d3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 922.438577] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 5e20b33c-1481-4bd3-b269-29a70cc3150d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 922.438706] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 0da64b45-fa00-4fe8-8d1d-df586f27743f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 922.438896] env[63345]: WARNING nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance a415d4f2-abc7-4553-8442-312316e686b2 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
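Editor's note — the _remove_deleted_instances_allocations records in this stretch fall into three buckets: allocations for instances this host actively manages (kept), allocations for instances scheduled here that have not started yet (heal skipped), and allocations that reference this host for instances it does not manage (warned about and left alone). The classifier below is a simplified, hypothetical paraphrase of those three log messages only; the parameter names (managed_uuids, scheduled_uuids) are assumptions made for the example and do not reflect Nova's actual data model.

```python
# Hypothetical, simplified classification of placement allocations against a
# compute node, paraphrasing the three resource-tracker messages in the log.
# Not Nova's implementation; the inputs are invented for the example.
from enum import Enum


class AllocationFate(Enum):
    KEEP = "actively managed on this host, allocation kept"
    SKIP_NOT_STARTED = "scheduled here but not started yet, skip heal"
    WARN_UNKNOWN = "not managed here but allocation references this host, warn and skip"


def classify_allocation(instance_uuid, managed_uuids, scheduled_uuids):
    if instance_uuid in managed_uuids:
        return AllocationFate.KEEP
    if instance_uuid in scheduled_uuids:
        return AllocationFate.SKIP_NOT_STARTED
    return AllocationFate.WARN_UNKNOWN


if __name__ == "__main__":
    # UUIDs taken from the surrounding log records, one per bucket.
    managed = {"869f8110-6490-4a47-955a-0ce085f826af"}
    scheduled = {"070a834d-6478-4705-8df0-2a27c8780507"}
    for uuid in ("869f8110-6490-4a47-955a-0ce085f826af",
                 "070a834d-6478-4705-8df0-2a27c8780507",
                 "bc9d2e6a-f77a-4a21-90bc-81949cbfce91"):
        print(uuid, "->", classify_allocation(uuid, managed, scheduled).value)
```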
[ 922.439044] env[63345]: WARNING nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 922.439174] env[63345]: WARNING nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 27e2cb12-d251-434a-b79e-6fbda80d3637 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 922.439290] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 017a06b3-cc1a-4822-a07f-ca881fd4254b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 922.439403] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance c84c8b9a-9164-4dd7-b094-dd09c15c6f21 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 922.458866] env[63345]: DEBUG nova.compute.manager [req-19aeedf7-a49c-459e-9210-45f1b7b1e826 req-f18cdd00-7bc3-4904-9646-7a14ab6e475b service nova] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Received event network-vif-plugged-2b931f56-815d-48ec-915d-c68e2ae0333f {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 922.459310] env[63345]: DEBUG oslo_concurrency.lockutils [req-19aeedf7-a49c-459e-9210-45f1b7b1e826 req-f18cdd00-7bc3-4904-9646-7a14ab6e475b service nova] Acquiring lock "0da64b45-fa00-4fe8-8d1d-df586f27743f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 922.459310] env[63345]: DEBUG oslo_concurrency.lockutils [req-19aeedf7-a49c-459e-9210-45f1b7b1e826 req-f18cdd00-7bc3-4904-9646-7a14ab6e475b service nova] Lock "0da64b45-fa00-4fe8-8d1d-df586f27743f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 922.459485] env[63345]: DEBUG oslo_concurrency.lockutils [req-19aeedf7-a49c-459e-9210-45f1b7b1e826 req-f18cdd00-7bc3-4904-9646-7a14ab6e475b service nova] Lock "0da64b45-fa00-4fe8-8d1d-df586f27743f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 922.459662] env[63345]: DEBUG nova.compute.manager [req-19aeedf7-a49c-459e-9210-45f1b7b1e826 req-f18cdd00-7bc3-4904-9646-7a14ab6e475b service nova] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] No waiting events found dispatching network-vif-plugged-2b931f56-815d-48ec-915d-c68e2ae0333f {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 
922.459859] env[63345]: WARNING nova.compute.manager [req-19aeedf7-a49c-459e-9210-45f1b7b1e826 req-f18cdd00-7bc3-4904-9646-7a14ab6e475b service nova] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Received unexpected event network-vif-plugged-2b931f56-815d-48ec-915d-c68e2ae0333f for instance with vm_state active and task_state None. [ 922.489029] env[63345]: DEBUG oslo_vmware.api [None req-8ded89f7-36e8-4b1f-bb8f-eb5b0d30d69a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Task: {'id': task-1017455, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.41196} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.491565] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ded89f7-36e8-4b1f-bb8f-eb5b0d30d69a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 922.491934] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-8ded89f7-36e8-4b1f-bb8f-eb5b0d30d69a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] [instance: 017a06b3-cc1a-4822-a07f-ca881fd4254b] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 922.492468] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-8ded89f7-36e8-4b1f-bb8f-eb5b0d30d69a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] [instance: 017a06b3-cc1a-4822-a07f-ca881fd4254b] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 922.492468] env[63345]: INFO nova.compute.manager [None req-8ded89f7-36e8-4b1f-bb8f-eb5b0d30d69a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] [instance: 017a06b3-cc1a-4822-a07f-ca881fd4254b] Took 1.14 seconds to destroy the instance on the hypervisor. [ 922.492730] env[63345]: DEBUG oslo.service.loopingcall [None req-8ded89f7-36e8-4b1f-bb8f-eb5b0d30d69a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 922.493208] env[63345]: DEBUG nova.compute.manager [-] [instance: 017a06b3-cc1a-4822-a07f-ca881fd4254b] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 922.493308] env[63345]: DEBUG nova.network.neutron [-] [instance: 017a06b3-cc1a-4822-a07f-ca881fd4254b] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 922.500285] env[63345]: DEBUG oslo_vmware.api [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017453, 'name': ReconfigVM_Task, 'duration_secs': 0.782166} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.500503] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: e5546a26-3f94-48a6-914a-2c37e63a0aeb] Reconfigured VM instance instance-00000057 to attach disk [datastore2] e5546a26-3f94-48a6-914a-2c37e63a0aeb/e5546a26-3f94-48a6-914a-2c37e63a0aeb.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 922.501146] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6e1daa28-ae97-4a0d-b5b3-009c580b8498 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.506865] env[63345]: DEBUG oslo_vmware.api [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for the task: (returnval){ [ 922.506865] env[63345]: value = "task-1017456" [ 922.506865] env[63345]: _type = "Task" [ 922.506865] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.514441] env[63345]: DEBUG oslo_vmware.api [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017456, 'name': Rename_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.584308] env[63345]: DEBUG oslo_concurrency.lockutils [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquiring lock "refresh_cache-0fe61754-458c-4c5c-bb2d-2677302e5fb9" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 922.584490] env[63345]: DEBUG oslo_concurrency.lockutils [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquired lock "refresh_cache-0fe61754-458c-4c5c-bb2d-2677302e5fb9" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 922.584646] env[63345]: DEBUG nova.network.neutron [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 922.666716] env[63345]: DEBUG oslo_concurrency.lockutils [req-ef4bcee8-9225-4bfb-b165-49d33ffbfb66 req-2671e40f-f869-4555-b69f-81c7e4f08da8 service nova] Releasing lock "refresh_cache-4868a0a0-ca35-44b0-a90c-124aa366af76" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 922.813629] env[63345]: DEBUG nova.network.neutron [None req-404ba970-da5b-41c1-bed5-c785a50d42c0 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Successfully updated port: 2b931f56-815d-48ec-915d-c68e2ae0333f {{(pid=63345) _update_port 
/opt/stack/nova/nova/network/neutron.py:586}} [ 922.942589] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 070a834d-6478-4705-8df0-2a27c8780507 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 922.942797] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 4868a0a0-ca35-44b0-a90c-124aa366af76 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 923.017323] env[63345]: DEBUG oslo_vmware.api [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017456, 'name': Rename_Task, 'duration_secs': 0.169587} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.017610] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: e5546a26-3f94-48a6-914a-2c37e63a0aeb] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 923.017855] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a9a419b3-d770-410a-bbcc-03fc5ea8892a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.024236] env[63345]: DEBUG oslo_vmware.api [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for the task: (returnval){ [ 923.024236] env[63345]: value = "task-1017457" [ 923.024236] env[63345]: _type = "Task" [ 923.024236] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.031874] env[63345]: DEBUG oslo_vmware.api [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017457, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.127567] env[63345]: DEBUG nova.network.neutron [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 923.290634] env[63345]: DEBUG nova.network.neutron [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Updating instance_info_cache with network_info: [{"id": "0cc6f455-5ad2-4802-a0ff-42268fe50023", "address": "fa:16:3e:61:01:ef", "network": {"id": "80bb8388-e130-46af-a4fc-1daea51d1bf5", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1343573007-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "964cee117b3c4601b3afe82a8bb9c23e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ddfb706a-add1-4e16-9ac4-d20b16a1df6d", "external-id": "nsx-vlan-transportzone-820", "segmentation_id": 820, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0cc6f455-5a", "ovs_interfaceid": "0cc6f455-5ad2-4802-a0ff-42268fe50023", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 923.316056] env[63345]: DEBUG oslo_concurrency.lockutils [None req-404ba970-da5b-41c1-bed5-c785a50d42c0 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquiring lock "refresh_cache-0da64b45-fa00-4fe8-8d1d-df586f27743f" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 923.316341] env[63345]: DEBUG oslo_concurrency.lockutils [None req-404ba970-da5b-41c1-bed5-c785a50d42c0 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquired lock "refresh_cache-0da64b45-fa00-4fe8-8d1d-df586f27743f" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 923.316483] env[63345]: DEBUG nova.network.neutron [None req-404ba970-da5b-41c1-bed5-c785a50d42c0 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 923.339900] env[63345]: DEBUG nova.network.neutron [-] [instance: 017a06b3-cc1a-4822-a07f-ca881fd4254b] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 923.445906] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 9aa651b8-317d-4153-8c33-9df0a5d16115 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 923.446218] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance e5546a26-3f94-48a6-914a-2c37e63a0aeb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 923.446261] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 0fe61754-458c-4c5c-bb2d-2677302e5fb9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 923.537202] env[63345]: DEBUG oslo_vmware.api [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017457, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.793309] env[63345]: DEBUG oslo_concurrency.lockutils [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Releasing lock "refresh_cache-0fe61754-458c-4c5c-bb2d-2677302e5fb9" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 923.793704] env[63345]: DEBUG nova.compute.manager [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Instance network_info: |[{"id": "0cc6f455-5ad2-4802-a0ff-42268fe50023", "address": "fa:16:3e:61:01:ef", "network": {"id": "80bb8388-e130-46af-a4fc-1daea51d1bf5", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1343573007-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "964cee117b3c4601b3afe82a8bb9c23e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ddfb706a-add1-4e16-9ac4-d20b16a1df6d", "external-id": "nsx-vlan-transportzone-820", "segmentation_id": 820, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0cc6f455-5a", "ovs_interfaceid": "0cc6f455-5ad2-4802-a0ff-42268fe50023", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 923.794300] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:61:01:ef', 
'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ddfb706a-add1-4e16-9ac4-d20b16a1df6d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0cc6f455-5ad2-4802-a0ff-42268fe50023', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 923.802502] env[63345]: DEBUG oslo.service.loopingcall [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 923.802741] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 923.802969] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3d7038c3-0f1f-4828-872f-0c0664fb8178 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.822582] env[63345]: DEBUG nova.compute.manager [req-e0915127-3249-4d9d-a324-8cceadf3bab3 req-843d277a-9196-4668-a41f-695ee14e6723 service nova] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Received event network-changed-2b931f56-815d-48ec-915d-c68e2ae0333f {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 923.822787] env[63345]: DEBUG nova.compute.manager [req-e0915127-3249-4d9d-a324-8cceadf3bab3 req-843d277a-9196-4668-a41f-695ee14e6723 service nova] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Refreshing instance network info cache due to event network-changed-2b931f56-815d-48ec-915d-c68e2ae0333f. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 923.822982] env[63345]: DEBUG oslo_concurrency.lockutils [req-e0915127-3249-4d9d-a324-8cceadf3bab3 req-843d277a-9196-4668-a41f-695ee14e6723 service nova] Acquiring lock "refresh_cache-0da64b45-fa00-4fe8-8d1d-df586f27743f" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 923.824825] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 923.824825] env[63345]: value = "task-1017458" [ 923.824825] env[63345]: _type = "Task" [ 923.824825] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.833519] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017458, 'name': CreateVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.842114] env[63345]: INFO nova.compute.manager [-] [instance: 017a06b3-cc1a-4822-a07f-ca881fd4254b] Took 1.35 seconds to deallocate network for instance. [ 923.858029] env[63345]: WARNING nova.network.neutron [None req-404ba970-da5b-41c1-bed5-c785a50d42c0 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] b360ab0d-3deb-4632-a8d5-c1639db9e9e2 already exists in list: networks containing: ['b360ab0d-3deb-4632-a8d5-c1639db9e9e2']. 
ignoring it [ 923.949296] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 726332dd-8699-49a4-a9ea-b9cbfc159855 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 924.040020] env[63345]: DEBUG oslo_vmware.api [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017457, 'name': PowerOnVM_Task, 'duration_secs': 0.546412} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.040020] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: e5546a26-3f94-48a6-914a-2c37e63a0aeb] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 924.040020] env[63345]: INFO nova.compute.manager [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: e5546a26-3f94-48a6-914a-2c37e63a0aeb] Took 8.74 seconds to spawn the instance on the hypervisor. [ 924.040020] env[63345]: DEBUG nova.compute.manager [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: e5546a26-3f94-48a6-914a-2c37e63a0aeb] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 924.040020] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7b024ce-5e9b-4b5f-b645-99fec0c797c2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.266013] env[63345]: DEBUG nova.network.neutron [None req-404ba970-da5b-41c1-bed5-c785a50d42c0 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Updating instance_info_cache with network_info: [{"id": "9e054cb2-eb47-4dd3-8ec7-d8205d577337", "address": "fa:16:3e:80:1f:9f", "network": {"id": "b360ab0d-3deb-4632-a8d5-c1639db9e9e2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2015660260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.226", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33c28bfca4da460e8ca96dc7519204c8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f35e69ef-c2c8-4b8c-9887-33e97b242c0a", "external-id": "nsx-vlan-transportzone-969", "segmentation_id": 969, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e054cb2-eb", "ovs_interfaceid": 
"9e054cb2-eb47-4dd3-8ec7-d8205d577337", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "2b931f56-815d-48ec-915d-c68e2ae0333f", "address": "fa:16:3e:a3:be:c5", "network": {"id": "b360ab0d-3deb-4632-a8d5-c1639db9e9e2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2015660260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33c28bfca4da460e8ca96dc7519204c8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f35e69ef-c2c8-4b8c-9887-33e97b242c0a", "external-id": "nsx-vlan-transportzone-969", "segmentation_id": 969, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2b931f56-81", "ovs_interfaceid": "2b931f56-815d-48ec-915d-c68e2ae0333f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 924.297629] env[63345]: DEBUG nova.compute.manager [req-59e47c70-fd2d-49f9-9a8a-e345c9bb711a req-e8bdd348-1f30-497f-89cd-2f8c1605ecba service nova] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Received event network-changed-0cc6f455-5ad2-4802-a0ff-42268fe50023 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 924.297835] env[63345]: DEBUG nova.compute.manager [req-59e47c70-fd2d-49f9-9a8a-e345c9bb711a req-e8bdd348-1f30-497f-89cd-2f8c1605ecba service nova] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Refreshing instance network info cache due to event network-changed-0cc6f455-5ad2-4802-a0ff-42268fe50023. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 924.298073] env[63345]: DEBUG oslo_concurrency.lockutils [req-59e47c70-fd2d-49f9-9a8a-e345c9bb711a req-e8bdd348-1f30-497f-89cd-2f8c1605ecba service nova] Acquiring lock "refresh_cache-0fe61754-458c-4c5c-bb2d-2677302e5fb9" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 924.298227] env[63345]: DEBUG oslo_concurrency.lockutils [req-59e47c70-fd2d-49f9-9a8a-e345c9bb711a req-e8bdd348-1f30-497f-89cd-2f8c1605ecba service nova] Acquired lock "refresh_cache-0fe61754-458c-4c5c-bb2d-2677302e5fb9" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 924.298395] env[63345]: DEBUG nova.network.neutron [req-59e47c70-fd2d-49f9-9a8a-e345c9bb711a req-e8bdd348-1f30-497f-89cd-2f8c1605ecba service nova] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Refreshing network info cache for port 0cc6f455-5ad2-4802-a0ff-42268fe50023 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 924.335157] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017458, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.348089] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8ded89f7-36e8-4b1f-bb8f-eb5b0d30d69a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 924.452534] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 49cf9c08-4024-40aa-9370-7b4f8d89e2cf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 924.555634] env[63345]: INFO nova.compute.manager [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: e5546a26-3f94-48a6-914a-2c37e63a0aeb] Took 33.97 seconds to build instance. [ 924.769233] env[63345]: DEBUG oslo_concurrency.lockutils [None req-404ba970-da5b-41c1-bed5-c785a50d42c0 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Releasing lock "refresh_cache-0da64b45-fa00-4fe8-8d1d-df586f27743f" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 924.769756] env[63345]: DEBUG oslo_concurrency.lockutils [None req-404ba970-da5b-41c1-bed5-c785a50d42c0 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquiring lock "0da64b45-fa00-4fe8-8d1d-df586f27743f" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 924.770104] env[63345]: DEBUG oslo_concurrency.lockutils [None req-404ba970-da5b-41c1-bed5-c785a50d42c0 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquired lock "0da64b45-fa00-4fe8-8d1d-df586f27743f" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 924.770259] env[63345]: DEBUG oslo_concurrency.lockutils [req-e0915127-3249-4d9d-a324-8cceadf3bab3 req-843d277a-9196-4668-a41f-695ee14e6723 service nova] Acquired lock "refresh_cache-0da64b45-fa00-4fe8-8d1d-df586f27743f" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 924.770444] env[63345]: DEBUG nova.network.neutron [req-e0915127-3249-4d9d-a324-8cceadf3bab3 req-843d277a-9196-4668-a41f-695ee14e6723 service nova] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Refreshing network info cache for port 2b931f56-815d-48ec-915d-c68e2ae0333f {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 924.772284] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bed708e1-b3bc-48c5-8857-a4d8f1c6b0c9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.790245] env[63345]: DEBUG nova.virt.hardware [None req-404ba970-da5b-41c1-bed5-c785a50d42c0 tempest-AttachInterfacesTestJSON-1256861117 
tempest-AttachInterfacesTestJSON-1256861117-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 924.790493] env[63345]: DEBUG nova.virt.hardware [None req-404ba970-da5b-41c1-bed5-c785a50d42c0 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 924.790660] env[63345]: DEBUG nova.virt.hardware [None req-404ba970-da5b-41c1-bed5-c785a50d42c0 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 924.790850] env[63345]: DEBUG nova.virt.hardware [None req-404ba970-da5b-41c1-bed5-c785a50d42c0 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 924.791008] env[63345]: DEBUG nova.virt.hardware [None req-404ba970-da5b-41c1-bed5-c785a50d42c0 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 924.791173] env[63345]: DEBUG nova.virt.hardware [None req-404ba970-da5b-41c1-bed5-c785a50d42c0 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 924.791383] env[63345]: DEBUG nova.virt.hardware [None req-404ba970-da5b-41c1-bed5-c785a50d42c0 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 924.791545] env[63345]: DEBUG nova.virt.hardware [None req-404ba970-da5b-41c1-bed5-c785a50d42c0 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 924.791749] env[63345]: DEBUG nova.virt.hardware [None req-404ba970-da5b-41c1-bed5-c785a50d42c0 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 924.791932] env[63345]: DEBUG nova.virt.hardware [None req-404ba970-da5b-41c1-bed5-c785a50d42c0 tempest-AttachInterfacesTestJSON-1256861117 
tempest-AttachInterfacesTestJSON-1256861117-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 924.792133] env[63345]: DEBUG nova.virt.hardware [None req-404ba970-da5b-41c1-bed5-c785a50d42c0 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 924.798320] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-404ba970-da5b-41c1-bed5-c785a50d42c0 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Reconfiguring VM to attach interface {{(pid=63345) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1929}} [ 924.798906] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-54d61523-008c-4f25-96b1-0bcb8db5690f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.819024] env[63345]: DEBUG oslo_vmware.api [None req-404ba970-da5b-41c1-bed5-c785a50d42c0 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Waiting for the task: (returnval){ [ 924.819024] env[63345]: value = "task-1017459" [ 924.819024] env[63345]: _type = "Task" [ 924.819024] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.827351] env[63345]: DEBUG oslo_vmware.api [None req-404ba970-da5b-41c1-bed5-c785a50d42c0 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017459, 'name': ReconfigVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.835209] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017458, 'name': CreateVM_Task, 'duration_secs': 0.654149} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.835366] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 924.836067] env[63345]: DEBUG oslo_concurrency.lockutils [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 924.836248] env[63345]: DEBUG oslo_concurrency.lockutils [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 924.836578] env[63345]: DEBUG oslo_concurrency.lockutils [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 924.836838] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-442e8eb1-6c15-4cf9-9ba1-cc6cea2cec30 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.841101] env[63345]: DEBUG oslo_vmware.api [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Waiting for the task: (returnval){ [ 924.841101] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]523d26a1-f76c-9fcf-75ed-d1d3d475ac8e" [ 924.841101] env[63345]: _type = "Task" [ 924.841101] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.848511] env[63345]: DEBUG oslo_vmware.api [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]523d26a1-f76c-9fcf-75ed-d1d3d475ac8e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.955160] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance a0eb9dae-0d27-419f-9210-eaa445e564c8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 924.955431] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Total usable vcpus: 48, total allocated vcpus: 12 {{(pid=63345) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 924.955581] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2816MB phys_disk=200GB used_disk=12GB total_vcpus=48 used_vcpus=12 pci_stats=[] {{(pid=63345) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 925.057491] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1cce828c-4ddd-4480-bffe-2dd2d1625e67 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lock "e5546a26-3f94-48a6-914a-2c37e63a0aeb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.478s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 925.066370] env[63345]: DEBUG nova.network.neutron [req-59e47c70-fd2d-49f9-9a8a-e345c9bb711a req-e8bdd348-1f30-497f-89cd-2f8c1605ecba service nova] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Updated VIF entry in instance network info cache for port 0cc6f455-5ad2-4802-a0ff-42268fe50023. {{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 925.066767] env[63345]: DEBUG nova.network.neutron [req-59e47c70-fd2d-49f9-9a8a-e345c9bb711a req-e8bdd348-1f30-497f-89cd-2f8c1605ecba service nova] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Updating instance_info_cache with network_info: [{"id": "0cc6f455-5ad2-4802-a0ff-42268fe50023", "address": "fa:16:3e:61:01:ef", "network": {"id": "80bb8388-e130-46af-a4fc-1daea51d1bf5", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1343573007-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "964cee117b3c4601b3afe82a8bb9c23e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ddfb706a-add1-4e16-9ac4-d20b16a1df6d", "external-id": "nsx-vlan-transportzone-820", "segmentation_id": 820, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0cc6f455-5a", "ovs_interfaceid": "0cc6f455-5ad2-4802-a0ff-42268fe50023", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 925.222023] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e26e7ca-8cd3-46a7-bc5b-af87dc2fb7b5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.229771] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-1b65a514-cb9f-442f-b847-78a1340e86ba {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.260652] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-374ec264-a4df-4dcc-ae0f-057c1000b3ea {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.267886] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec3f8ec0-ffb3-40ff-ac10-18595488caa9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.282910] env[63345]: DEBUG nova.compute.provider_tree [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 925.328673] env[63345]: DEBUG oslo_vmware.api [None req-404ba970-da5b-41c1-bed5-c785a50d42c0 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017459, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.352106] env[63345]: DEBUG oslo_vmware.api [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]523d26a1-f76c-9fcf-75ed-d1d3d475ac8e, 'name': SearchDatastore_Task, 'duration_secs': 0.024774} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.352415] env[63345]: DEBUG oslo_concurrency.lockutils [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 925.352672] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 925.352912] env[63345]: DEBUG oslo_concurrency.lockutils [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 925.353078] env[63345]: DEBUG oslo_concurrency.lockutils [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 
925.353267] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 925.353521] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d7a40d15-6c8f-467f-bfdb-2cdf0740113e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.362189] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 925.362383] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 925.363100] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4a73dcdc-2a86-4a47-9a66-6db60d59153e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.369949] env[63345]: DEBUG oslo_vmware.api [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Waiting for the task: (returnval){ [ 925.369949] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]521c25c7-6e46-d68f-7eee-39bfb8577556" [ 925.369949] env[63345]: _type = "Task" [ 925.369949] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.377999] env[63345]: DEBUG oslo_vmware.api [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]521c25c7-6e46-d68f-7eee-39bfb8577556, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.513289] env[63345]: DEBUG nova.network.neutron [req-e0915127-3249-4d9d-a324-8cceadf3bab3 req-843d277a-9196-4668-a41f-695ee14e6723 service nova] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Updated VIF entry in instance network info cache for port 2b931f56-815d-48ec-915d-c68e2ae0333f. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 925.513742] env[63345]: DEBUG nova.network.neutron [req-e0915127-3249-4d9d-a324-8cceadf3bab3 req-843d277a-9196-4668-a41f-695ee14e6723 service nova] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Updating instance_info_cache with network_info: [{"id": "9e054cb2-eb47-4dd3-8ec7-d8205d577337", "address": "fa:16:3e:80:1f:9f", "network": {"id": "b360ab0d-3deb-4632-a8d5-c1639db9e9e2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2015660260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.226", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33c28bfca4da460e8ca96dc7519204c8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f35e69ef-c2c8-4b8c-9887-33e97b242c0a", "external-id": "nsx-vlan-transportzone-969", "segmentation_id": 969, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e054cb2-eb", "ovs_interfaceid": "9e054cb2-eb47-4dd3-8ec7-d8205d577337", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "2b931f56-815d-48ec-915d-c68e2ae0333f", "address": "fa:16:3e:a3:be:c5", "network": {"id": "b360ab0d-3deb-4632-a8d5-c1639db9e9e2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2015660260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33c28bfca4da460e8ca96dc7519204c8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f35e69ef-c2c8-4b8c-9887-33e97b242c0a", "external-id": "nsx-vlan-transportzone-969", "segmentation_id": 969, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2b931f56-81", "ovs_interfaceid": "2b931f56-815d-48ec-915d-c68e2ae0333f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 925.572238] env[63345]: DEBUG oslo_concurrency.lockutils [req-59e47c70-fd2d-49f9-9a8a-e345c9bb711a req-e8bdd348-1f30-497f-89cd-2f8c1605ecba service nova] Releasing lock "refresh_cache-0fe61754-458c-4c5c-bb2d-2677302e5fb9" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 925.582522] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5a4f49d5-6326-4b77-8401-63f570982af7 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquiring lock "e5546a26-3f94-48a6-914a-2c37e63a0aeb" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63345) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 925.582799] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5a4f49d5-6326-4b77-8401-63f570982af7 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lock "e5546a26-3f94-48a6-914a-2c37e63a0aeb" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 925.785983] env[63345]: DEBUG nova.scheduler.client.report [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 925.830515] env[63345]: DEBUG oslo_vmware.api [None req-404ba970-da5b-41c1-bed5-c785a50d42c0 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017459, 'name': ReconfigVM_Task, 'duration_secs': 0.870022} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.830964] env[63345]: DEBUG oslo_concurrency.lockutils [None req-404ba970-da5b-41c1-bed5-c785a50d42c0 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Releasing lock "0da64b45-fa00-4fe8-8d1d-df586f27743f" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 925.831231] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-404ba970-da5b-41c1-bed5-c785a50d42c0 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Reconfigured VM to attach interface {{(pid=63345) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1943}} [ 925.881307] env[63345]: DEBUG oslo_vmware.api [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]521c25c7-6e46-d68f-7eee-39bfb8577556, 'name': SearchDatastore_Task, 'duration_secs': 0.011136} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.882141] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d75a3ca1-91b7-45fd-b4a3-f602718f23f2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.887210] env[63345]: DEBUG oslo_vmware.api [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Waiting for the task: (returnval){ [ 925.887210] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52b5f506-f063-c0a1-866d-a31c3f4787ba" [ 925.887210] env[63345]: _type = "Task" [ 925.887210] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.894897] env[63345]: DEBUG oslo_vmware.api [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52b5f506-f063-c0a1-866d-a31c3f4787ba, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.016596] env[63345]: DEBUG oslo_concurrency.lockutils [req-e0915127-3249-4d9d-a324-8cceadf3bab3 req-843d277a-9196-4668-a41f-695ee14e6723 service nova] Releasing lock "refresh_cache-0da64b45-fa00-4fe8-8d1d-df586f27743f" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 926.016876] env[63345]: DEBUG nova.compute.manager [req-e0915127-3249-4d9d-a324-8cceadf3bab3 req-843d277a-9196-4668-a41f-695ee14e6723 service nova] [instance: 017a06b3-cc1a-4822-a07f-ca881fd4254b] Received event network-vif-deleted-d861c19b-10d8-47c4-90d2-a823d9faa164 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 926.086447] env[63345]: DEBUG nova.compute.utils [None req-5a4f49d5-6326-4b77-8401-63f570982af7 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 926.290586] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63345) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 926.290874] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 4.904s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 926.291230] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.067s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 926.293043] env[63345]: INFO nova.compute.claims [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 926.335863] env[63345]: DEBUG oslo_concurrency.lockutils [None req-404ba970-da5b-41c1-bed5-c785a50d42c0 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Lock "interface-0da64b45-fa00-4fe8-8d1d-df586f27743f-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 8.017s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 926.397605] env[63345]: DEBUG oslo_vmware.api [None req-93483604-10cc-4f88-bf09-7b04ed117819 
tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52b5f506-f063-c0a1-866d-a31c3f4787ba, 'name': SearchDatastore_Task, 'duration_secs': 0.00921} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.397971] env[63345]: DEBUG oslo_concurrency.lockutils [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 926.398409] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 0fe61754-458c-4c5c-bb2d-2677302e5fb9/0fe61754-458c-4c5c-bb2d-2677302e5fb9.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 926.398812] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-097ba0ae-a335-4444-824d-7139d23b337d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.411980] env[63345]: DEBUG oslo_vmware.api [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Waiting for the task: (returnval){ [ 926.411980] env[63345]: value = "task-1017460" [ 926.411980] env[63345]: _type = "Task" [ 926.411980] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.420760] env[63345]: DEBUG oslo_vmware.api [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017460, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.589488] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5a4f49d5-6326-4b77-8401-63f570982af7 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lock "e5546a26-3f94-48a6-914a-2c37e63a0aeb" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 926.922788] env[63345]: DEBUG oslo_vmware.api [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017460, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.424831] env[63345]: DEBUG oslo_vmware.api [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017460, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.71564} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.427338] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 0fe61754-458c-4c5c-bb2d-2677302e5fb9/0fe61754-458c-4c5c-bb2d-2677302e5fb9.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 927.427579] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 927.428017] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3e8ce8a2-eec3-428b-ad83-1fff4126541c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.435273] env[63345]: DEBUG oslo_vmware.api [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Waiting for the task: (returnval){ [ 927.435273] env[63345]: value = "task-1017461" [ 927.435273] env[63345]: _type = "Task" [ 927.435273] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.446229] env[63345]: DEBUG oslo_vmware.api [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017461, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.580200] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2414e64-567f-4c94-83c0-aee226b029a7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.587885] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfff5e45-92bd-4a07-9d68-8bebceb120d9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.618838] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ceef07cb-1002-40cf-ae39-45eaabc7e614 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.632352] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abdf4e3e-4be2-419d-b4bd-a544226bf629 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.648817] env[63345]: DEBUG nova.compute.provider_tree [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 927.653898] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5a4f49d5-6326-4b77-8401-63f570982af7 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquiring lock "e5546a26-3f94-48a6-914a-2c37e63a0aeb" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 927.654191] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5a4f49d5-6326-4b77-8401-63f570982af7 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lock "e5546a26-3f94-48a6-914a-2c37e63a0aeb" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 927.655023] env[63345]: INFO nova.compute.manager [None req-5a4f49d5-6326-4b77-8401-63f570982af7 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: e5546a26-3f94-48a6-914a-2c37e63a0aeb] Attaching volume ce489aa2-8e07-4edd-b43a-7068ef1635be to /dev/sdb [ 927.695267] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63a4cc55-8a64-462f-bf1a-d445f0c2971c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.703349] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e723973c-f367-489e-a092-c62731440af6 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.720088] env[63345]: DEBUG nova.virt.block_device [None req-5a4f49d5-6326-4b77-8401-63f570982af7 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 
e5546a26-3f94-48a6-914a-2c37e63a0aeb] Updating existing volume attachment record: ed46a20c-fb79-4351-9d54-17ecd64b7d90 {{(pid=63345) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 927.944138] env[63345]: DEBUG oslo_vmware.api [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017461, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.086109} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.944431] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 927.945203] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ffb568d-b99c-414a-b160-60d84b9275fc {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.966449] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Reconfiguring VM instance instance-00000058 to attach disk [datastore2] 0fe61754-458c-4c5c-bb2d-2677302e5fb9/0fe61754-458c-4c5c-bb2d-2677302e5fb9.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 927.966705] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c3c9543b-d61c-44ea-afc1-b031950940c0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.985344] env[63345]: DEBUG oslo_vmware.api [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Waiting for the task: (returnval){ [ 927.985344] env[63345]: value = "task-1017463" [ 927.985344] env[63345]: _type = "Task" [ 927.985344] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.992987] env[63345]: DEBUG oslo_vmware.api [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017463, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.151442] env[63345]: DEBUG nova.scheduler.client.report [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 928.496167] env[63345]: DEBUG oslo_vmware.api [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017463, 'name': ReconfigVM_Task, 'duration_secs': 0.40263} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.496473] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Reconfigured VM instance instance-00000058 to attach disk [datastore2] 0fe61754-458c-4c5c-bb2d-2677302e5fb9/0fe61754-458c-4c5c-bb2d-2677302e5fb9.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 928.497234] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7926261d-a01a-4468-8a26-959425c583da {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.504432] env[63345]: DEBUG oslo_vmware.api [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Waiting for the task: (returnval){ [ 928.504432] env[63345]: value = "task-1017466" [ 928.504432] env[63345]: _type = "Task" [ 928.504432] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.512412] env[63345]: DEBUG oslo_vmware.api [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017466, 'name': Rename_Task} progress is 5%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.646605] env[63345]: DEBUG oslo_concurrency.lockutils [None req-560a17a5-d7b2-4d00-9b9f-b78499b48a70 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquiring lock "interface-0da64b45-fa00-4fe8-8d1d-df586f27743f-0ca27a73-4f2c-47db-b68f-966110d6d772" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 928.646957] env[63345]: DEBUG oslo_concurrency.lockutils [None req-560a17a5-d7b2-4d00-9b9f-b78499b48a70 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Lock "interface-0da64b45-fa00-4fe8-8d1d-df586f27743f-0ca27a73-4f2c-47db-b68f-966110d6d772" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 928.647294] env[63345]: DEBUG nova.objects.instance [None req-560a17a5-d7b2-4d00-9b9f-b78499b48a70 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Lazy-loading 'flavor' on Instance uuid 0da64b45-fa00-4fe8-8d1d-df586f27743f {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 928.656590] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.365s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 928.657145] env[63345]: DEBUG nova.compute.manager [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Start building networks asynchronously for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 928.661367] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.301s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 928.661693] env[63345]: DEBUG nova.objects.instance [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Lazy-loading 'pci_requests' on Instance uuid 070a834d-6478-4705-8df0-2a27c8780507 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 929.014693] env[63345]: DEBUG oslo_vmware.api [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017466, 'name': Rename_Task, 'duration_secs': 0.181078} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.014985] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 929.015239] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-366ffbdc-be2b-49c9-b4cd-76f81aa887e4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.021030] env[63345]: DEBUG oslo_vmware.api [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Waiting for the task: (returnval){ [ 929.021030] env[63345]: value = "task-1017467" [ 929.021030] env[63345]: _type = "Task" [ 929.021030] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.028235] env[63345]: DEBUG oslo_vmware.api [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017467, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.165192] env[63345]: DEBUG nova.objects.instance [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Lazy-loading 'numa_topology' on Instance uuid 070a834d-6478-4705-8df0-2a27c8780507 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 929.167311] env[63345]: DEBUG nova.compute.utils [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 929.168911] env[63345]: DEBUG nova.compute.manager [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Allocating IP information in the background. 
{{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 929.169140] env[63345]: DEBUG nova.network.neutron [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 929.241527] env[63345]: DEBUG nova.policy [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b3754c2317404a48a80cfee69f1044ee', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '57e386920081487583ea143003aca8c4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 929.303612] env[63345]: DEBUG nova.objects.instance [None req-560a17a5-d7b2-4d00-9b9f-b78499b48a70 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Lazy-loading 'pci_requests' on Instance uuid 0da64b45-fa00-4fe8-8d1d-df586f27743f {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 929.532450] env[63345]: DEBUG oslo_vmware.api [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017467, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.634858] env[63345]: DEBUG nova.network.neutron [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Successfully created port: 114e38e0-a558-4242-ad5b-4aac063dcb72 {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 929.669794] env[63345]: INFO nova.compute.claims [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 929.673242] env[63345]: DEBUG nova.compute.manager [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Start building block device mappings for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 929.807059] env[63345]: DEBUG nova.objects.base [None req-560a17a5-d7b2-4d00-9b9f-b78499b48a70 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Object Instance<0da64b45-fa00-4fe8-8d1d-df586f27743f> lazy-loaded attributes: flavor,pci_requests {{(pid=63345) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 929.807335] env[63345]: DEBUG nova.network.neutron [None req-560a17a5-d7b2-4d00-9b9f-b78499b48a70 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 929.914737] env[63345]: DEBUG nova.policy [None req-560a17a5-d7b2-4d00-9b9f-b78499b48a70 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e36fd04030444217acadbbf4e4fe9be0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '33c28bfca4da460e8ca96dc7519204c8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 930.031761] env[63345]: DEBUG oslo_vmware.api [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017467, 'name': PowerOnVM_Task, 'duration_secs': 0.602441} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.032068] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 930.032278] env[63345]: INFO nova.compute.manager [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Took 8.69 seconds to spawn the instance on the hypervisor. [ 930.032485] env[63345]: DEBUG nova.compute.manager [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 930.033292] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1703216e-9eb3-49ae-84f0-f228365f0e76 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.552024] env[63345]: INFO nova.compute.manager [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Took 33.98 seconds to build instance. 
[ 930.683268] env[63345]: DEBUG nova.compute.manager [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Start spawning the instance on the hypervisor. {{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 930.710311] env[63345]: DEBUG nova.virt.hardware [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 930.710577] env[63345]: DEBUG nova.virt.hardware [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 930.710857] env[63345]: DEBUG nova.virt.hardware [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 930.710933] env[63345]: DEBUG nova.virt.hardware [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 930.711149] env[63345]: DEBUG nova.virt.hardware [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 930.711235] env[63345]: DEBUG nova.virt.hardware [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 930.711513] env[63345]: DEBUG nova.virt.hardware [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 930.712015] env[63345]: DEBUG nova.virt.hardware [None 
req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 930.712277] env[63345]: DEBUG nova.virt.hardware [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 930.712407] env[63345]: DEBUG nova.virt.hardware [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 930.712681] env[63345]: DEBUG nova.virt.hardware [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 930.713526] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b300b57a-309d-4fc5-ab3d-2d509e81d1e4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.725240] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35a69cc2-5f41-42f6-a93c-044f5f872bff {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.922540] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c74822b-358d-43b1-b68b-8a130db550aa {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.930270] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96814f47-8e1a-4062-82f8-ad647bd01b1c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.959330] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-861ade35-bfb3-46e1-8c1f-116040d465bc {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.966998] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0417f20e-13f8-4c00-be67-6b9f4815cd4e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.981509] env[63345]: DEBUG nova.compute.provider_tree [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 931.053969] env[63345]: DEBUG oslo_concurrency.lockutils [None req-93483604-10cc-4f88-bf09-7b04ed117819 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Lock 
"0fe61754-458c-4c5c-bb2d-2677302e5fb9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.495s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 931.486835] env[63345]: DEBUG nova.scheduler.client.report [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 931.878340] env[63345]: DEBUG nova.compute.manager [req-a0050bbe-0763-4d38-99aa-c886cba05d1b req-76a1806b-a57b-4b91-8b25-072448049087 service nova] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Received event network-vif-plugged-114e38e0-a558-4242-ad5b-4aac063dcb72 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 931.878576] env[63345]: DEBUG oslo_concurrency.lockutils [req-a0050bbe-0763-4d38-99aa-c886cba05d1b req-76a1806b-a57b-4b91-8b25-072448049087 service nova] Acquiring lock "726332dd-8699-49a4-a9ea-b9cbfc159855-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 931.878778] env[63345]: DEBUG oslo_concurrency.lockutils [req-a0050bbe-0763-4d38-99aa-c886cba05d1b req-76a1806b-a57b-4b91-8b25-072448049087 service nova] Lock "726332dd-8699-49a4-a9ea-b9cbfc159855-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 931.878957] env[63345]: DEBUG oslo_concurrency.lockutils [req-a0050bbe-0763-4d38-99aa-c886cba05d1b req-76a1806b-a57b-4b91-8b25-072448049087 service nova] Lock "726332dd-8699-49a4-a9ea-b9cbfc159855-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 931.879414] env[63345]: DEBUG nova.compute.manager [req-a0050bbe-0763-4d38-99aa-c886cba05d1b req-76a1806b-a57b-4b91-8b25-072448049087 service nova] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] No waiting events found dispatching network-vif-plugged-114e38e0-a558-4242-ad5b-4aac063dcb72 {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 931.879683] env[63345]: WARNING nova.compute.manager [req-a0050bbe-0763-4d38-99aa-c886cba05d1b req-76a1806b-a57b-4b91-8b25-072448049087 service nova] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Received unexpected event network-vif-plugged-114e38e0-a558-4242-ad5b-4aac063dcb72 for instance with vm_state building and task_state spawning. 
[ 931.936070] env[63345]: DEBUG nova.network.neutron [None req-560a17a5-d7b2-4d00-9b9f-b78499b48a70 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Successfully updated port: 0ca27a73-4f2c-47db-b68f-966110d6d772 {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 931.990350] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.329s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 931.992596] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7f9b0d2b-afa1-4fff-b083-a7a9a4604fd0 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.506s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 931.992803] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7f9b0d2b-afa1-4fff-b083-a7a9a4604fd0 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 931.994487] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6284547f-525f-42d1-ac38-525740462381 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.269s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 931.994681] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6284547f-525f-42d1-ac38-525740462381 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 931.996507] env[63345]: DEBUG oslo_concurrency.lockutils [None req-44885277-f439-46df-b61e-45e89b2a79d4 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.105s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 931.996707] env[63345]: DEBUG oslo_concurrency.lockutils [None req-44885277-f439-46df-b61e-45e89b2a79d4 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 931.998186] env[63345]: DEBUG oslo_concurrency.lockutils [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 
tempest-ServersTestJSON-216022561-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.144s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 931.999616] env[63345]: INFO nova.compute.claims [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 49cf9c08-4024-40aa-9370-7b4f8d89e2cf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 932.023839] env[63345]: INFO nova.scheduler.client.report [None req-44885277-f439-46df-b61e-45e89b2a79d4 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Deleted allocations for instance 27e2cb12-d251-434a-b79e-6fbda80d3637 [ 932.023839] env[63345]: INFO nova.scheduler.client.report [None req-6284547f-525f-42d1-ac38-525740462381 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Deleted allocations for instance 0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01 [ 932.037875] env[63345]: INFO nova.network.neutron [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Updating port cf06de95-5747-4226-b66c-b9ccca47321d with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 932.040358] env[63345]: INFO nova.scheduler.client.report [None req-7f9b0d2b-afa1-4fff-b083-a7a9a4604fd0 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Deleted allocations for instance a415d4f2-abc7-4553-8442-312316e686b2 [ 932.051689] env[63345]: DEBUG nova.compute.manager [req-63bbdb68-4e97-4c72-b1f9-2cbf94e676dc req-f3b67366-124c-4a85-9dda-3e7a1c438db4 service nova] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Received event network-vif-plugged-0ca27a73-4f2c-47db-b68f-966110d6d772 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 932.051921] env[63345]: DEBUG oslo_concurrency.lockutils [req-63bbdb68-4e97-4c72-b1f9-2cbf94e676dc req-f3b67366-124c-4a85-9dda-3e7a1c438db4 service nova] Acquiring lock "0da64b45-fa00-4fe8-8d1d-df586f27743f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 932.052166] env[63345]: DEBUG oslo_concurrency.lockutils [req-63bbdb68-4e97-4c72-b1f9-2cbf94e676dc req-f3b67366-124c-4a85-9dda-3e7a1c438db4 service nova] Lock "0da64b45-fa00-4fe8-8d1d-df586f27743f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 932.052389] env[63345]: DEBUG oslo_concurrency.lockutils [req-63bbdb68-4e97-4c72-b1f9-2cbf94e676dc req-f3b67366-124c-4a85-9dda-3e7a1c438db4 service nova] Lock "0da64b45-fa00-4fe8-8d1d-df586f27743f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 932.052633] env[63345]: DEBUG nova.compute.manager [req-63bbdb68-4e97-4c72-b1f9-2cbf94e676dc req-f3b67366-124c-4a85-9dda-3e7a1c438db4 service nova] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] 
No waiting events found dispatching network-vif-plugged-0ca27a73-4f2c-47db-b68f-966110d6d772 {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 932.052760] env[63345]: WARNING nova.compute.manager [req-63bbdb68-4e97-4c72-b1f9-2cbf94e676dc req-f3b67366-124c-4a85-9dda-3e7a1c438db4 service nova] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Received unexpected event network-vif-plugged-0ca27a73-4f2c-47db-b68f-966110d6d772 for instance with vm_state active and task_state None. [ 932.247834] env[63345]: DEBUG nova.network.neutron [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Successfully updated port: 114e38e0-a558-4242-ad5b-4aac063dcb72 {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 932.436897] env[63345]: DEBUG oslo_concurrency.lockutils [None req-560a17a5-d7b2-4d00-9b9f-b78499b48a70 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquiring lock "refresh_cache-0da64b45-fa00-4fe8-8d1d-df586f27743f" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 932.437197] env[63345]: DEBUG oslo_concurrency.lockutils [None req-560a17a5-d7b2-4d00-9b9f-b78499b48a70 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquired lock "refresh_cache-0da64b45-fa00-4fe8-8d1d-df586f27743f" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 932.437540] env[63345]: DEBUG nova.network.neutron [None req-560a17a5-d7b2-4d00-9b9f-b78499b48a70 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 932.534826] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6284547f-525f-42d1-ac38-525740462381 tempest-ImagesTestJSON-171924145 tempest-ImagesTestJSON-171924145-project-member] Lock "0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.089s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 932.535981] env[63345]: DEBUG oslo_concurrency.lockutils [None req-44885277-f439-46df-b61e-45e89b2a79d4 tempest-ImagesOneServerNegativeTestJSON-1510602040 tempest-ImagesOneServerNegativeTestJSON-1510602040-project-member] Lock "27e2cb12-d251-434a-b79e-6fbda80d3637" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.327s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 932.547879] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7f9b0d2b-afa1-4fff-b083-a7a9a4604fd0 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Lock "a415d4f2-abc7-4553-8442-312316e686b2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.704s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 932.680589] env[63345]: DEBUG nova.compute.manager [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 
tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Stashing vm_state: active {{(pid=63345) _prep_resize /opt/stack/nova/nova/compute/manager.py:5953}} [ 932.748633] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Acquiring lock "refresh_cache-726332dd-8699-49a4-a9ea-b9cbfc159855" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 932.748788] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Acquired lock "refresh_cache-726332dd-8699-49a4-a9ea-b9cbfc159855" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 932.748935] env[63345]: DEBUG nova.network.neutron [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 932.769954] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-5a4f49d5-6326-4b77-8401-63f570982af7 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: e5546a26-3f94-48a6-914a-2c37e63a0aeb] Volume attach. Driver type: vmdk {{(pid=63345) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 932.770229] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-5a4f49d5-6326-4b77-8401-63f570982af7 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: e5546a26-3f94-48a6-914a-2c37e63a0aeb] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-226105', 'volume_id': 'ce489aa2-8e07-4edd-b43a-7068ef1635be', 'name': 'volume-ce489aa2-8e07-4edd-b43a-7068ef1635be', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e5546a26-3f94-48a6-914a-2c37e63a0aeb', 'attached_at': '', 'detached_at': '', 'volume_id': 'ce489aa2-8e07-4edd-b43a-7068ef1635be', 'serial': 'ce489aa2-8e07-4edd-b43a-7068ef1635be'} {{(pid=63345) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 932.771122] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0294d56e-55e1-4087-8c2d-d567e13bcfca {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.788406] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7e72c2d-4754-43f0-9fe7-3feb3bf4c5ce {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.814113] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-5a4f49d5-6326-4b77-8401-63f570982af7 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: e5546a26-3f94-48a6-914a-2c37e63a0aeb] Reconfiguring VM instance instance-00000057 to attach disk [datastore2] 
volume-ce489aa2-8e07-4edd-b43a-7068ef1635be/volume-ce489aa2-8e07-4edd-b43a-7068ef1635be.vmdk or device None with type thin {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 932.814500] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-abc633d5-af9d-40b3-88ed-927f00190e89 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.833617] env[63345]: DEBUG oslo_vmware.api [None req-5a4f49d5-6326-4b77-8401-63f570982af7 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for the task: (returnval){ [ 932.833617] env[63345]: value = "task-1017469" [ 932.833617] env[63345]: _type = "Task" [ 932.833617] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.876059] env[63345]: DEBUG oslo_concurrency.lockutils [None req-3fa755dc-bfd1-4cae-9cf9-c0d907f64b33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Acquiring lock "dde93fd5-6312-4d91-b041-b7fc84b207d3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 932.876322] env[63345]: DEBUG oslo_concurrency.lockutils [None req-3fa755dc-bfd1-4cae-9cf9-c0d907f64b33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Lock "dde93fd5-6312-4d91-b041-b7fc84b207d3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 932.876535] env[63345]: DEBUG oslo_concurrency.lockutils [None req-3fa755dc-bfd1-4cae-9cf9-c0d907f64b33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Acquiring lock "dde93fd5-6312-4d91-b041-b7fc84b207d3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 932.876728] env[63345]: DEBUG oslo_concurrency.lockutils [None req-3fa755dc-bfd1-4cae-9cf9-c0d907f64b33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Lock "dde93fd5-6312-4d91-b041-b7fc84b207d3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 932.876906] env[63345]: DEBUG oslo_concurrency.lockutils [None req-3fa755dc-bfd1-4cae-9cf9-c0d907f64b33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Lock "dde93fd5-6312-4d91-b041-b7fc84b207d3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 932.879051] env[63345]: INFO nova.compute.manager [None req-3fa755dc-bfd1-4cae-9cf9-c0d907f64b33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: dde93fd5-6312-4d91-b041-b7fc84b207d3] Terminating instance [ 932.988545] env[63345]: WARNING nova.network.neutron [None 
req-560a17a5-d7b2-4d00-9b9f-b78499b48a70 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] b360ab0d-3deb-4632-a8d5-c1639db9e9e2 already exists in list: networks containing: ['b360ab0d-3deb-4632-a8d5-c1639db9e9e2']. ignoring it [ 932.988784] env[63345]: WARNING nova.network.neutron [None req-560a17a5-d7b2-4d00-9b9f-b78499b48a70 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] b360ab0d-3deb-4632-a8d5-c1639db9e9e2 already exists in list: networks containing: ['b360ab0d-3deb-4632-a8d5-c1639db9e9e2']. ignoring it [ 933.200919] env[63345]: DEBUG oslo_concurrency.lockutils [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 933.271723] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-398179b0-a47b-42b1-8033-634ff1b4e3c0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.280704] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4de47b1f-efc5-45e2-b63a-b5aa600381c3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.314287] env[63345]: DEBUG nova.network.neutron [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 933.318558] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dff5647-fe5e-4939-9f22-d9600d4eb75c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.326763] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a557498-51bd-40c8-830e-b3508cc396bb {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.340556] env[63345]: DEBUG nova.compute.provider_tree [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 933.353092] env[63345]: DEBUG oslo_vmware.api [None req-5a4f49d5-6326-4b77-8401-63f570982af7 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017469, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.382612] env[63345]: DEBUG nova.compute.manager [None req-3fa755dc-bfd1-4cae-9cf9-c0d907f64b33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: dde93fd5-6312-4d91-b041-b7fc84b207d3] Start destroying the instance on the hypervisor. {{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 933.382960] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-3fa755dc-bfd1-4cae-9cf9-c0d907f64b33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: dde93fd5-6312-4d91-b041-b7fc84b207d3] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 933.385029] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00025c02-0193-42b8-90d2-db5eab9e3b97 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.393146] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fa755dc-bfd1-4cae-9cf9-c0d907f64b33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: dde93fd5-6312-4d91-b041-b7fc84b207d3] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 933.393407] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e2e9f3e8-2b52-4949-82d5-cc8b0988bee4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.404100] env[63345]: DEBUG oslo_vmware.api [None req-3fa755dc-bfd1-4cae-9cf9-c0d907f64b33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Waiting for the task: (returnval){ [ 933.404100] env[63345]: value = "task-1017470" [ 933.404100] env[63345]: _type = "Task" [ 933.404100] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.415496] env[63345]: DEBUG oslo_vmware.api [None req-3fa755dc-bfd1-4cae-9cf9-c0d907f64b33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': task-1017470, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.487289] env[63345]: DEBUG nova.network.neutron [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Updating instance_info_cache with network_info: [{"id": "114e38e0-a558-4242-ad5b-4aac063dcb72", "address": "fa:16:3e:bb:2c:f8", "network": {"id": "dffa0b34-9323-42eb-aeb1-e32aebcb75c8", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1826417035-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "57e386920081487583ea143003aca8c4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "94e1d797-8eb2-4400-9f7d-f2eb60eb4cf2", "external-id": "nsx-vlan-transportzone-828", "segmentation_id": 828, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap114e38e0-a5", "ovs_interfaceid": "114e38e0-a558-4242-ad5b-4aac063dcb72", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 933.774638] env[63345]: DEBUG nova.network.neutron [None req-560a17a5-d7b2-4d00-9b9f-b78499b48a70 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Updating instance_info_cache with network_info: [{"id": "9e054cb2-eb47-4dd3-8ec7-d8205d577337", "address": "fa:16:3e:80:1f:9f", "network": {"id": "b360ab0d-3deb-4632-a8d5-c1639db9e9e2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2015660260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.226", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33c28bfca4da460e8ca96dc7519204c8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f35e69ef-c2c8-4b8c-9887-33e97b242c0a", "external-id": "nsx-vlan-transportzone-969", "segmentation_id": 969, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e054cb2-eb", "ovs_interfaceid": "9e054cb2-eb47-4dd3-8ec7-d8205d577337", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "2b931f56-815d-48ec-915d-c68e2ae0333f", "address": "fa:16:3e:a3:be:c5", "network": {"id": "b360ab0d-3deb-4632-a8d5-c1639db9e9e2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2015660260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", 
"type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33c28bfca4da460e8ca96dc7519204c8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f35e69ef-c2c8-4b8c-9887-33e97b242c0a", "external-id": "nsx-vlan-transportzone-969", "segmentation_id": 969, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2b931f56-81", "ovs_interfaceid": "2b931f56-815d-48ec-915d-c68e2ae0333f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "0ca27a73-4f2c-47db-b68f-966110d6d772", "address": "fa:16:3e:4d:52:45", "network": {"id": "b360ab0d-3deb-4632-a8d5-c1639db9e9e2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2015660260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33c28bfca4da460e8ca96dc7519204c8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f35e69ef-c2c8-4b8c-9887-33e97b242c0a", "external-id": "nsx-vlan-transportzone-969", "segmentation_id": 969, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0ca27a73-4f", "ovs_interfaceid": "0ca27a73-4f2c-47db-b68f-966110d6d772", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 933.853021] env[63345]: DEBUG nova.scheduler.client.report [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 933.853414] env[63345]: DEBUG oslo_vmware.api [None req-5a4f49d5-6326-4b77-8401-63f570982af7 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017469, 'name': ReconfigVM_Task, 'duration_secs': 0.779091} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.854017] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-5a4f49d5-6326-4b77-8401-63f570982af7 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: e5546a26-3f94-48a6-914a-2c37e63a0aeb] Reconfigured VM instance instance-00000057 to attach disk [datastore2] volume-ce489aa2-8e07-4edd-b43a-7068ef1635be/volume-ce489aa2-8e07-4edd-b43a-7068ef1635be.vmdk or device None with type thin {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 933.859466] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-13d85646-69cb-4443-98a4-085cea07341d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.876218] env[63345]: DEBUG oslo_vmware.api [None req-5a4f49d5-6326-4b77-8401-63f570982af7 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for the task: (returnval){ [ 933.876218] env[63345]: value = "task-1017471" [ 933.876218] env[63345]: _type = "Task" [ 933.876218] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.885449] env[63345]: DEBUG oslo_vmware.api [None req-5a4f49d5-6326-4b77-8401-63f570982af7 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017471, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.914760] env[63345]: DEBUG oslo_vmware.api [None req-3fa755dc-bfd1-4cae-9cf9-c0d907f64b33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': task-1017470, 'name': PowerOffVM_Task, 'duration_secs': 0.496217} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.915343] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fa755dc-bfd1-4cae-9cf9-c0d907f64b33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: dde93fd5-6312-4d91-b041-b7fc84b207d3] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 933.915663] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-3fa755dc-bfd1-4cae-9cf9-c0d907f64b33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: dde93fd5-6312-4d91-b041-b7fc84b207d3] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 933.916048] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cc361053-ae12-412a-bdf8-89f2a7278ed4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.993140] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Releasing lock "refresh_cache-726332dd-8699-49a4-a9ea-b9cbfc159855" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 933.993140] env[63345]: DEBUG nova.compute.manager [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Instance network_info: |[{"id": "114e38e0-a558-4242-ad5b-4aac063dcb72", "address": "fa:16:3e:bb:2c:f8", "network": {"id": "dffa0b34-9323-42eb-aeb1-e32aebcb75c8", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1826417035-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "57e386920081487583ea143003aca8c4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "94e1d797-8eb2-4400-9f7d-f2eb60eb4cf2", "external-id": "nsx-vlan-transportzone-828", "segmentation_id": 828, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap114e38e0-a5", "ovs_interfaceid": "114e38e0-a558-4242-ad5b-4aac063dcb72", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 933.993140] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bb:2c:f8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '94e1d797-8eb2-4400-9f7d-f2eb60eb4cf2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '114e38e0-a558-4242-ad5b-4aac063dcb72', 
'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 934.000546] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Creating folder: Project (57e386920081487583ea143003aca8c4). Parent ref: group-v225918. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 934.002732] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d427f349-dd14-4086-a535-ed07197622df {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.005113] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-3fa755dc-bfd1-4cae-9cf9-c0d907f64b33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: dde93fd5-6312-4d91-b041-b7fc84b207d3] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 934.005571] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-3fa755dc-bfd1-4cae-9cf9-c0d907f64b33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: dde93fd5-6312-4d91-b041-b7fc84b207d3] Deleting contents of the VM from datastore datastore1 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 934.005913] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-3fa755dc-bfd1-4cae-9cf9-c0d907f64b33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Deleting the datastore file [datastore1] dde93fd5-6312-4d91-b041-b7fc84b207d3 {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 934.006325] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b3180885-e8e8-424e-9d20-8cd7104afe1f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.014530] env[63345]: DEBUG oslo_vmware.api [None req-3fa755dc-bfd1-4cae-9cf9-c0d907f64b33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Waiting for the task: (returnval){ [ 934.014530] env[63345]: value = "task-1017474" [ 934.014530] env[63345]: _type = "Task" [ 934.014530] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.019820] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Created folder: Project (57e386920081487583ea143003aca8c4) in parent group-v225918. [ 934.020196] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Creating folder: Instances. Parent ref: group-v226106. 
{{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 934.021143] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-03b58eb8-dad7-481e-bcc2-ce8241fa4406 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.025696] env[63345]: DEBUG oslo_vmware.api [None req-3fa755dc-bfd1-4cae-9cf9-c0d907f64b33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': task-1017474, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.038120] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Created folder: Instances in parent group-v226106. [ 934.038120] env[63345]: DEBUG oslo.service.loopingcall [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 934.038120] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 934.038120] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e500ef44-b27e-424a-ba18-c3bbf73edf56 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.060021] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 934.060021] env[63345]: value = "task-1017476" [ 934.060021] env[63345]: _type = "Task" [ 934.060021] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.068955] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017476, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.281020] env[63345]: DEBUG oslo_concurrency.lockutils [None req-560a17a5-d7b2-4d00-9b9f-b78499b48a70 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Releasing lock "refresh_cache-0da64b45-fa00-4fe8-8d1d-df586f27743f" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 934.281020] env[63345]: DEBUG oslo_concurrency.lockutils [None req-560a17a5-d7b2-4d00-9b9f-b78499b48a70 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquiring lock "0da64b45-fa00-4fe8-8d1d-df586f27743f" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 934.281020] env[63345]: DEBUG oslo_concurrency.lockutils [None req-560a17a5-d7b2-4d00-9b9f-b78499b48a70 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquired lock "0da64b45-fa00-4fe8-8d1d-df586f27743f" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 934.281020] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f210a252-fdc3-4f14-99f9-c9e5373a9a76 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.303453] env[63345]: DEBUG nova.virt.hardware [None req-560a17a5-d7b2-4d00-9b9f-b78499b48a70 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 934.303990] env[63345]: DEBUG nova.virt.hardware [None req-560a17a5-d7b2-4d00-9b9f-b78499b48a70 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 934.304323] env[63345]: DEBUG nova.virt.hardware [None req-560a17a5-d7b2-4d00-9b9f-b78499b48a70 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 934.304687] env[63345]: DEBUG nova.virt.hardware [None req-560a17a5-d7b2-4d00-9b9f-b78499b48a70 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 934.304994] env[63345]: DEBUG nova.virt.hardware [None req-560a17a5-d7b2-4d00-9b9f-b78499b48a70 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 934.305328] env[63345]: DEBUG nova.virt.hardware [None req-560a17a5-d7b2-4d00-9b9f-b78499b48a70 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 934.306541] env[63345]: DEBUG nova.virt.hardware [None req-560a17a5-d7b2-4d00-9b9f-b78499b48a70 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 934.306889] env[63345]: DEBUG nova.virt.hardware [None req-560a17a5-d7b2-4d00-9b9f-b78499b48a70 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 934.309032] env[63345]: DEBUG nova.virt.hardware [None req-560a17a5-d7b2-4d00-9b9f-b78499b48a70 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 934.309032] env[63345]: DEBUG nova.virt.hardware [None req-560a17a5-d7b2-4d00-9b9f-b78499b48a70 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 934.309032] env[63345]: DEBUG nova.virt.hardware [None req-560a17a5-d7b2-4d00-9b9f-b78499b48a70 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 934.314160] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-560a17a5-d7b2-4d00-9b9f-b78499b48a70 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Reconfiguring VM to attach interface {{(pid=63345) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1929}} [ 934.314635] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-66b28869-4935-41e0-b311-ae0935624c56 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.336604] env[63345]: DEBUG oslo_vmware.api [None req-560a17a5-d7b2-4d00-9b9f-b78499b48a70 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Waiting for the task: (returnval){ [ 934.336604] env[63345]: value = "task-1017477" [ 934.336604] env[63345]: _type = "Task" [ 934.336604] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.343782] env[63345]: DEBUG oslo_vmware.api [None req-560a17a5-d7b2-4d00-9b9f-b78499b48a70 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017477, 'name': ReconfigVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.357417] env[63345]: DEBUG oslo_concurrency.lockutils [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.357s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 934.357417] env[63345]: DEBUG nova.compute.manager [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 49cf9c08-4024-40aa-9370-7b4f8d89e2cf] Start building networks asynchronously for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 934.360984] env[63345]: DEBUG oslo_concurrency.lockutils [None req-419e3c65-74ef-4f96-b1f3-04fd5006a001 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.319s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 934.361658] env[63345]: DEBUG oslo_concurrency.lockutils [None req-419e3c65-74ef-4f96-b1f3-04fd5006a001 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 934.364404] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.672s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 934.366709] env[63345]: DEBUG nova.objects.instance [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Lazy-loading 'pci_requests' on Instance uuid 9aa651b8-317d-4153-8c33-9df0a5d16115 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 934.388381] env[63345]: DEBUG nova.compute.manager [req-ef7ddbcf-5f61-44a5-8d89-05f854e2a0ff req-b4f18310-f384-4e58-8145-9492095e0cc9 service nova] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Received event network-changed-0ca27a73-4f2c-47db-b68f-966110d6d772 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 934.392150] env[63345]: DEBUG nova.compute.manager [req-ef7ddbcf-5f61-44a5-8d89-05f854e2a0ff req-b4f18310-f384-4e58-8145-9492095e0cc9 service nova] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Refreshing instance network info cache due to event 
network-changed-0ca27a73-4f2c-47db-b68f-966110d6d772. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 934.392150] env[63345]: DEBUG oslo_concurrency.lockutils [req-ef7ddbcf-5f61-44a5-8d89-05f854e2a0ff req-b4f18310-f384-4e58-8145-9492095e0cc9 service nova] Acquiring lock "refresh_cache-0da64b45-fa00-4fe8-8d1d-df586f27743f" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 934.392150] env[63345]: DEBUG oslo_concurrency.lockutils [req-ef7ddbcf-5f61-44a5-8d89-05f854e2a0ff req-b4f18310-f384-4e58-8145-9492095e0cc9 service nova] Acquired lock "refresh_cache-0da64b45-fa00-4fe8-8d1d-df586f27743f" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 934.392150] env[63345]: DEBUG nova.network.neutron [req-ef7ddbcf-5f61-44a5-8d89-05f854e2a0ff req-b4f18310-f384-4e58-8145-9492095e0cc9 service nova] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Refreshing network info cache for port 0ca27a73-4f2c-47db-b68f-966110d6d772 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 934.396018] env[63345]: DEBUG oslo_vmware.api [None req-5a4f49d5-6326-4b77-8401-63f570982af7 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017471, 'name': ReconfigVM_Task, 'duration_secs': 0.180153} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.397309] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-5a4f49d5-6326-4b77-8401-63f570982af7 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: e5546a26-3f94-48a6-914a-2c37e63a0aeb] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-226105', 'volume_id': 'ce489aa2-8e07-4edd-b43a-7068ef1635be', 'name': 'volume-ce489aa2-8e07-4edd-b43a-7068ef1635be', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e5546a26-3f94-48a6-914a-2c37e63a0aeb', 'attached_at': '', 'detached_at': '', 'volume_id': 'ce489aa2-8e07-4edd-b43a-7068ef1635be', 'serial': 'ce489aa2-8e07-4edd-b43a-7068ef1635be'} {{(pid=63345) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 934.434685] env[63345]: INFO nova.scheduler.client.report [None req-419e3c65-74ef-4f96-b1f3-04fd5006a001 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Deleted allocations for instance bc9d2e6a-f77a-4a21-90bc-81949cbfce91 [ 934.487894] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Acquiring lock "refresh_cache-070a834d-6478-4705-8df0-2a27c8780507" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 934.488446] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Acquired lock "refresh_cache-070a834d-6478-4705-8df0-2a27c8780507" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 934.488446] env[63345]: DEBUG nova.network.neutron [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 
tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 934.525059] env[63345]: DEBUG oslo_vmware.api [None req-3fa755dc-bfd1-4cae-9cf9-c0d907f64b33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Task: {'id': task-1017474, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.226769} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.525333] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-3fa755dc-bfd1-4cae-9cf9-c0d907f64b33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 934.525530] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-3fa755dc-bfd1-4cae-9cf9-c0d907f64b33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: dde93fd5-6312-4d91-b041-b7fc84b207d3] Deleted contents of the VM from datastore datastore1 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 934.525717] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-3fa755dc-bfd1-4cae-9cf9-c0d907f64b33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: dde93fd5-6312-4d91-b041-b7fc84b207d3] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 934.525899] env[63345]: INFO nova.compute.manager [None req-3fa755dc-bfd1-4cae-9cf9-c0d907f64b33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] [instance: dde93fd5-6312-4d91-b041-b7fc84b207d3] Took 1.14 seconds to destroy the instance on the hypervisor. [ 934.526167] env[63345]: DEBUG oslo.service.loopingcall [None req-3fa755dc-bfd1-4cae-9cf9-c0d907f64b33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 934.526372] env[63345]: DEBUG nova.compute.manager [-] [instance: dde93fd5-6312-4d91-b041-b7fc84b207d3] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 934.526468] env[63345]: DEBUG nova.network.neutron [-] [instance: dde93fd5-6312-4d91-b041-b7fc84b207d3] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 934.568573] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017476, 'name': CreateVM_Task, 'duration_secs': 0.428785} completed successfully. 
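Once the guest is destroyed on the hypervisor, network deallocation is driven through an oslo.service looping call (the `_deallocate_network_with_retries` function named above). A rough sketch of the retry behaviour, assuming an illustrative `deallocate` callable and attempt/delay parameters rather than Nova's actual configuration:

    import time

    def deallocate_network_with_retries(deallocate, attempts=3, delay=2.0):
        # Illustrative retry wrapper: keep trying the deallocation until it
        # succeeds or attempts run out. The real code drives this through an
        # oslo.service looping call and catches narrower exception types.
        last_exc = None
        for attempt in range(1, attempts + 1):
            try:
                return deallocate()
            except Exception as exc:
                last_exc = exc
                print(f"deallocate attempt {attempt}/{attempts} failed: {exc}")
                time.sleep(delay)
        raise last_exc

    # deallocate_network_with_retries(
    #     lambda: network_api.deallocate_for_instance(context, instance))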
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.570603] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 934.571533] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 934.571736] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 934.572545] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 934.573091] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1b762d24-06ad-430e-a8d2-463ac115765f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.577470] env[63345]: DEBUG oslo_vmware.api [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 934.577470] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52a34fb1-8629-f4d9-9238-d1aec139ae6a" [ 934.577470] env[63345]: _type = "Task" [ 934.577470] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.585016] env[63345]: DEBUG oslo_vmware.api [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52a34fb1-8629-f4d9-9238-d1aec139ae6a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.845898] env[63345]: DEBUG oslo_vmware.api [None req-560a17a5-d7b2-4d00-9b9f-b78499b48a70 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017477, 'name': ReconfigVM_Task} progress is 14%. 
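The Task/wait_for_task records above show the driver polling task-1017477 until ReconfigVM_Task finishes. A simplified polling loop, assuming a hypothetical `get_task_info` accessor in place of the real oslo.vmware session plumbing:

    import time

    def wait_for_task(get_task_info, poll_interval=0.5):
        """Poll a vCenter task until it reaches a terminal state.

        `get_task_info` is a hypothetical callable returning an object with
        `state` ('queued', 'running', 'success', 'error'), `progress` and
        `error` attributes.
        """
        while True:
            info = get_task_info()
            if info.state == "success":
                return info
            if info.state == "error":
                raise RuntimeError(f"task failed: {info.error}")
            print(f"task progress is {info.progress}%")
            time.sleep(poll_interval)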
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.868337] env[63345]: DEBUG nova.compute.utils [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 934.870914] env[63345]: DEBUG nova.objects.instance [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Lazy-loading 'numa_topology' on Instance uuid 9aa651b8-317d-4153-8c33-9df0a5d16115 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 934.872985] env[63345]: DEBUG nova.compute.manager [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 49cf9c08-4024-40aa-9370-7b4f8d89e2cf] Allocating IP information in the background. {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 934.872985] env[63345]: DEBUG nova.network.neutron [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 49cf9c08-4024-40aa-9370-7b4f8d89e2cf] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 934.943547] env[63345]: DEBUG oslo_concurrency.lockutils [None req-419e3c65-74ef-4f96-b1f3-04fd5006a001 tempest-VolumesAdminNegativeTest-2048491753 tempest-VolumesAdminNegativeTest-2048491753-project-member] Lock "bc9d2e6a-f77a-4a21-90bc-81949cbfce91" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.484s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 935.099752] env[63345]: DEBUG oslo_vmware.api [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52a34fb1-8629-f4d9-9238-d1aec139ae6a, 'name': SearchDatastore_Task, 'duration_secs': 0.011546} completed successfully. 
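The waited/held timings on the "compute_resources" lock come from oslo.concurrency's synchronized wrapper. A minimal equivalent using the same library's lock() context manager, with the timing code added here purely for illustration:

    import time
    from oslo_concurrency import lockutils

    def claim_resources(instance_uuid):
        t0 = time.monotonic()
        with lockutils.lock("compute_resources"):
            waited = time.monotonic() - t0
            print(f'Lock "compute_resources" acquired :: waited {waited:.3f}s')
            t1 = time.monotonic()
            try:
                # ... perform the resource claim for instance_uuid ...
                pass
            finally:
                held = time.monotonic() - t1
                print(f'Lock "compute_resources" released :: held {held:.3f}s')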
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.099752] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 935.099752] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 935.099752] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 935.099752] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 935.100075] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 935.100211] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3cb41f61-2915-454e-b596-f22c962eb1a1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.111073] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 935.111451] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Folder [datastore2] devstack-image-cache_base created. 
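The lock / SearchDatastore / MakeDirectory sequence around the devstack-image-cache_base path is the image-cache-if-missing pattern: populate the cache once under a per-image lock, then copy the cached VMDK into the instance folder. A sketch with hypothetical datastore helpers standing in for the vCenter tasks:

    from oslo_concurrency import lockutils

    def ensure_cached_image(image_id, cache_exists, make_cache_dir, fetch_image,
                            copy_to_instance, instance_uuid):
        # cache_exists/make_cache_dir/fetch_image/copy_to_instance are
        # hypothetical callables wrapping the datastore operations.
        cache_path = f"[datastore2] devstack-image-cache_base/{image_id}/{image_id}.vmdk"
        with lockutils.lock(cache_path):
            if not cache_exists(cache_path):
                make_cache_dir("[datastore2] devstack-image-cache_base")
                fetch_image(image_id, cache_path)   # download from Glance once
        # Every instance then gets its own copy of the cached disk.
        dest = f"[datastore2] {instance_uuid}/{instance_uuid}.vmdk"
        copy_to_instance(cache_path, dest)
        return dest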
{{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 935.112600] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5d1be97a-5464-4cc8-8808-d004323c33a8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.119477] env[63345]: DEBUG oslo_vmware.api [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 935.119477] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52f9cbcf-1a2c-68ba-e684-df01d0dc2e92" [ 935.119477] env[63345]: _type = "Task" [ 935.119477] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.131432] env[63345]: DEBUG oslo_vmware.api [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52f9cbcf-1a2c-68ba-e684-df01d0dc2e92, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.164830] env[63345]: DEBUG nova.policy [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fce058d27d8e4da19af436b282b37f32', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '63d7b3facae6416989f763e610cf98f7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 935.351567] env[63345]: DEBUG oslo_vmware.api [None req-560a17a5-d7b2-4d00-9b9f-b78499b48a70 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017477, 'name': ReconfigVM_Task, 'duration_secs': 0.765189} completed successfully. 
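The policy record above shows network:attach_external_network being denied for a token that only carries the member and reader roles; by default that rule is admin-only. A simplified stand-in for the oslo.policy check (the rule table here is illustrative, not Nova's policy file):

    RULES = {"network:attach_external_network": {"admin"}}

    def policy_check(rule, credentials):
        # Pass if the credentials carry at least one of the required roles.
        required = RULES.get(rule, set())
        return bool(required & set(credentials.get("roles", [])))

    creds = {"roles": ["member", "reader"],
             "project_id": "63d7b3facae6416989f763e610cf98f7"}
    print(policy_check("network:attach_external_network", creds))  # False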
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.352130] env[63345]: DEBUG oslo_concurrency.lockutils [None req-560a17a5-d7b2-4d00-9b9f-b78499b48a70 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Releasing lock "0da64b45-fa00-4fe8-8d1d-df586f27743f" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 935.352348] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-560a17a5-d7b2-4d00-9b9f-b78499b48a70 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Reconfigured VM to attach interface {{(pid=63345) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1943}} [ 935.360016] env[63345]: DEBUG nova.network.neutron [-] [instance: dde93fd5-6312-4d91-b041-b7fc84b207d3] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 935.373535] env[63345]: DEBUG nova.compute.manager [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 49cf9c08-4024-40aa-9370-7b4f8d89e2cf] Start building block device mappings for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 935.377925] env[63345]: INFO nova.compute.claims [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 935.465115] env[63345]: DEBUG nova.objects.instance [None req-5a4f49d5-6326-4b77-8401-63f570982af7 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lazy-loading 'flavor' on Instance uuid e5546a26-3f94-48a6-914a-2c37e63a0aeb {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 935.514934] env[63345]: DEBUG nova.network.neutron [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Updating instance_info_cache with network_info: [{"id": "cf06de95-5747-4226-b66c-b9ccca47321d", "address": "fa:16:3e:9e:46:44", "network": {"id": "403ac06e-e45e-4215-bf0c-16ddd583ddc5", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1349318740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ac5c2a653dae436c97514507939c4e3c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e41c97-4d75-4041-ae71-321e7e9d480b", "external-id": "nsx-vlan-transportzone-483", "segmentation_id": 483, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf06de95-57", "ovs_interfaceid": "cf06de95-5747-4226-b66c-b9ccca47321d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 935.625891] env[63345]: DEBUG nova.network.neutron [req-ef7ddbcf-5f61-44a5-8d89-05f854e2a0ff req-b4f18310-f384-4e58-8145-9492095e0cc9 service nova] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Updated VIF entry in instance network info cache for port 0ca27a73-4f2c-47db-b68f-966110d6d772. {{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 935.626421] env[63345]: DEBUG nova.network.neutron [req-ef7ddbcf-5f61-44a5-8d89-05f854e2a0ff req-b4f18310-f384-4e58-8145-9492095e0cc9 service nova] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Updating instance_info_cache with network_info: [{"id": "9e054cb2-eb47-4dd3-8ec7-d8205d577337", "address": "fa:16:3e:80:1f:9f", "network": {"id": "b360ab0d-3deb-4632-a8d5-c1639db9e9e2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2015660260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.226", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33c28bfca4da460e8ca96dc7519204c8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f35e69ef-c2c8-4b8c-9887-33e97b242c0a", "external-id": "nsx-vlan-transportzone-969", "segmentation_id": 969, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e054cb2-eb", "ovs_interfaceid": "9e054cb2-eb47-4dd3-8ec7-d8205d577337", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "2b931f56-815d-48ec-915d-c68e2ae0333f", "address": "fa:16:3e:a3:be:c5", "network": {"id": "b360ab0d-3deb-4632-a8d5-c1639db9e9e2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2015660260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33c28bfca4da460e8ca96dc7519204c8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f35e69ef-c2c8-4b8c-9887-33e97b242c0a", "external-id": "nsx-vlan-transportzone-969", "segmentation_id": 969, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2b931f56-81", "ovs_interfaceid": "2b931f56-815d-48ec-915d-c68e2ae0333f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "0ca27a73-4f2c-47db-b68f-966110d6d772", "address": "fa:16:3e:4d:52:45", "network": {"id": "b360ab0d-3deb-4632-a8d5-c1639db9e9e2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2015660260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": 
"gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33c28bfca4da460e8ca96dc7519204c8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f35e69ef-c2c8-4b8c-9887-33e97b242c0a", "external-id": "nsx-vlan-transportzone-969", "segmentation_id": 969, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0ca27a73-4f", "ovs_interfaceid": "0ca27a73-4f2c-47db-b68f-966110d6d772", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 935.635028] env[63345]: DEBUG oslo_vmware.api [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52f9cbcf-1a2c-68ba-e684-df01d0dc2e92, 'name': SearchDatastore_Task, 'duration_secs': 0.014214} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.641919] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c6fb4b50-e0db-40c1-adfd-2d45e6421d49 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.650452] env[63345]: DEBUG oslo_vmware.api [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 935.650452] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52c32c5f-bff1-8eda-d288-a3da200e2213" [ 935.650452] env[63345]: _type = "Task" [ 935.650452] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.658062] env[63345]: DEBUG oslo_vmware.api [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52c32c5f-bff1-8eda-d288-a3da200e2213, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.734686] env[63345]: DEBUG nova.network.neutron [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 49cf9c08-4024-40aa-9370-7b4f8d89e2cf] Successfully created port: 065f6e50-8edf-4eac-a2e3-d944aa6f33ba {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 935.858910] env[63345]: DEBUG oslo_concurrency.lockutils [None req-560a17a5-d7b2-4d00-9b9f-b78499b48a70 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Lock "interface-0da64b45-fa00-4fe8-8d1d-df586f27743f-0ca27a73-4f2c-47db-b68f-966110d6d772" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.212s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 935.860190] env[63345]: INFO nova.compute.manager [-] [instance: dde93fd5-6312-4d91-b041-b7fc84b207d3] Took 1.33 seconds to deallocate network for instance. [ 935.974063] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5a4f49d5-6326-4b77-8401-63f570982af7 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lock "e5546a26-3f94-48a6-914a-2c37e63a0aeb" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.320s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 936.021996] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Releasing lock "refresh_cache-070a834d-6478-4705-8df0-2a27c8780507" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 936.064069] env[63345]: DEBUG nova.virt.hardware [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='d610f9cbd7be66c95500299795d9af6a',container_format='bare',created_at=2024-09-30T09:39:54Z,direct_url=,disk_format='vmdk',id=163b8ba5-dbfa-4890-b990-7e227e0ccf91,min_disk=1,min_ram=0,name='tempest-ServersNegativeTestJSON-server-1214242986-shelved',owner='ac5c2a653dae436c97514507939c4e3c',properties=ImageMetaProps,protected=,size=31669760,status='active',tags=,updated_at=2024-09-30T09:40:13Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 936.064535] env[63345]: DEBUG nova.virt.hardware [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 936.064825] env[63345]: DEBUG nova.virt.hardware [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 
tempest-ServersNegativeTestJSON-797332457-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 936.065296] env[63345]: DEBUG nova.virt.hardware [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 936.065584] env[63345]: DEBUG nova.virt.hardware [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 936.065857] env[63345]: DEBUG nova.virt.hardware [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 936.066420] env[63345]: DEBUG nova.virt.hardware [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 936.066721] env[63345]: DEBUG nova.virt.hardware [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 936.067031] env[63345]: DEBUG nova.virt.hardware [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 936.067423] env[63345]: DEBUG nova.virt.hardware [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 936.067734] env[63345]: DEBUG nova.virt.hardware [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 936.069573] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68688af9-9e36-4b9a-bc53-5d0318126132 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.080419] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-273158cf-6428-402a-99e3-b255e3dddab5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.095700] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 
tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9e:46:44', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69e41c97-4d75-4041-ae71-321e7e9d480b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cf06de95-5747-4226-b66c-b9ccca47321d', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 936.103279] env[63345]: DEBUG oslo.service.loopingcall [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 936.103603] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 936.103756] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1c420470-c33a-4391-ab41-686f9333bc1b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.125063] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 936.125063] env[63345]: value = "task-1017478" [ 936.125063] env[63345]: _type = "Task" [ 936.125063] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.131103] env[63345]: DEBUG oslo_concurrency.lockutils [req-ef7ddbcf-5f61-44a5-8d89-05f854e2a0ff req-b4f18310-f384-4e58-8145-9492095e0cc9 service nova] Releasing lock "refresh_cache-0da64b45-fa00-4fe8-8d1d-df586f27743f" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 936.131386] env[63345]: DEBUG nova.compute.manager [req-ef7ddbcf-5f61-44a5-8d89-05f854e2a0ff req-b4f18310-f384-4e58-8145-9492095e0cc9 service nova] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Received event network-changed-114e38e0-a558-4242-ad5b-4aac063dcb72 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 936.131557] env[63345]: DEBUG nova.compute.manager [req-ef7ddbcf-5f61-44a5-8d89-05f854e2a0ff req-b4f18310-f384-4e58-8145-9492095e0cc9 service nova] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Refreshing instance network info cache due to event network-changed-114e38e0-a558-4242-ad5b-4aac063dcb72. 
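The "Instance VIF info" record above is derived from the cached VIF entry for port cf06de95-5747-4226-b66c-b9ccca47321d. An approximate mapping for the NSX OpaqueNetwork case seen here (the real translation in the vmwareapi driver covers more VIF types):

    def vif_info_for_spawn(vif, vif_model="vmxnet3"):
        details = vif["details"]
        return {
            "network_name": vif["network"]["bridge"],          # 'br-int'
            "mac_address": vif["address"],
            "network_ref": {
                "type": "OpaqueNetwork",
                "network-id": details["nsx-logical-switch-id"],
                "network-type": "nsx.LogicalSwitch",
                "use-external-id": True,
            },
            "iface_id": vif["id"],
            "vif_model": vif_model,
        }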
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 936.131784] env[63345]: DEBUG oslo_concurrency.lockutils [req-ef7ddbcf-5f61-44a5-8d89-05f854e2a0ff req-b4f18310-f384-4e58-8145-9492095e0cc9 service nova] Acquiring lock "refresh_cache-726332dd-8699-49a4-a9ea-b9cbfc159855" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 936.131929] env[63345]: DEBUG oslo_concurrency.lockutils [req-ef7ddbcf-5f61-44a5-8d89-05f854e2a0ff req-b4f18310-f384-4e58-8145-9492095e0cc9 service nova] Acquired lock "refresh_cache-726332dd-8699-49a4-a9ea-b9cbfc159855" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 936.132216] env[63345]: DEBUG nova.network.neutron [req-ef7ddbcf-5f61-44a5-8d89-05f854e2a0ff req-b4f18310-f384-4e58-8145-9492095e0cc9 service nova] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Refreshing network info cache for port 114e38e0-a558-4242-ad5b-4aac063dcb72 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 936.136346] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017478, 'name': CreateVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.159041] env[63345]: DEBUG oslo_vmware.api [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52c32c5f-bff1-8eda-d288-a3da200e2213, 'name': SearchDatastore_Task, 'duration_secs': 0.012924} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.159593] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 936.159684] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 726332dd-8699-49a4-a9ea-b9cbfc159855/726332dd-8699-49a4-a9ea-b9cbfc159855.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 936.160014] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4ba76917-3a80-49ca-a164-999784f32894 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.166281] env[63345]: DEBUG oslo_vmware.api [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 936.166281] env[63345]: value = "task-1017479" [ 936.166281] env[63345]: _type = "Task" [ 936.166281] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.173826] env[63345]: DEBUG oslo_vmware.api [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017479, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.369545] env[63345]: DEBUG oslo_concurrency.lockutils [None req-3fa755dc-bfd1-4cae-9cf9-c0d907f64b33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 936.397295] env[63345]: DEBUG nova.compute.manager [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 49cf9c08-4024-40aa-9370-7b4f8d89e2cf] Start spawning the instance on the hypervisor. {{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 936.451719] env[63345]: DEBUG nova.virt.hardware [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 936.452194] env[63345]: DEBUG nova.virt.hardware [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 936.452421] env[63345]: DEBUG nova.virt.hardware [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 936.452640] env[63345]: DEBUG nova.virt.hardware [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 936.453044] env[63345]: DEBUG nova.virt.hardware [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 936.453305] env[63345]: DEBUG nova.virt.hardware [None 
req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 936.453659] env[63345]: DEBUG nova.virt.hardware [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 936.454456] env[63345]: DEBUG nova.virt.hardware [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 936.454456] env[63345]: DEBUG nova.virt.hardware [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 936.454456] env[63345]: DEBUG nova.virt.hardware [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 936.454737] env[63345]: DEBUG nova.virt.hardware [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 936.455620] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aae1a810-d3f8-4aa1-a043-c9e61a171ab6 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.472606] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0d6d07b-9406-4c73-911b-48aac09cbca2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.569911] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b1aba700-aa3f-4db6-97fd-5ef1a47430df tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquiring lock "e5546a26-3f94-48a6-914a-2c37e63a0aeb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 936.570628] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b1aba700-aa3f-4db6-97fd-5ef1a47430df tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lock "e5546a26-3f94-48a6-914a-2c37e63a0aeb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 936.571041] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b1aba700-aa3f-4db6-97fd-5ef1a47430df 
tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquiring lock "e5546a26-3f94-48a6-914a-2c37e63a0aeb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 936.571349] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b1aba700-aa3f-4db6-97fd-5ef1a47430df tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lock "e5546a26-3f94-48a6-914a-2c37e63a0aeb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 936.571495] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b1aba700-aa3f-4db6-97fd-5ef1a47430df tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lock "e5546a26-3f94-48a6-914a-2c37e63a0aeb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 936.574803] env[63345]: INFO nova.compute.manager [None req-b1aba700-aa3f-4db6-97fd-5ef1a47430df tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: e5546a26-3f94-48a6-914a-2c37e63a0aeb] Terminating instance [ 936.646317] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017478, 'name': CreateVM_Task} progress is 99%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.683051] env[63345]: DEBUG oslo_vmware.api [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017479, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.803636] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50827c53-a478-4ea2-ba59-4f8f1ca4744a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.815758] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87d5d2b9-680e-430b-bfb1-265d25c13dec {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.858036] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db2ccdaa-35e2-4336-bb2c-1006afc84271 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.868097] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6a9919b-426a-4288-ab8e-371a6db90d75 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.886755] env[63345]: DEBUG nova.compute.provider_tree [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 937.010466] env[63345]: DEBUG nova.compute.manager [req-f1d7727d-1244-4b39-b5e9-4cfd7198a94e req-c777d141-a615-49b3-98f5-65c96ce820f2 service nova] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Received event network-vif-plugged-cf06de95-5747-4226-b66c-b9ccca47321d {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 937.010466] env[63345]: DEBUG oslo_concurrency.lockutils [req-f1d7727d-1244-4b39-b5e9-4cfd7198a94e req-c777d141-a615-49b3-98f5-65c96ce820f2 service nova] Acquiring lock "070a834d-6478-4705-8df0-2a27c8780507-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 937.010466] env[63345]: DEBUG oslo_concurrency.lockutils [req-f1d7727d-1244-4b39-b5e9-4cfd7198a94e req-c777d141-a615-49b3-98f5-65c96ce820f2 service nova] Lock "070a834d-6478-4705-8df0-2a27c8780507-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 937.010466] env[63345]: DEBUG oslo_concurrency.lockutils [req-f1d7727d-1244-4b39-b5e9-4cfd7198a94e req-c777d141-a615-49b3-98f5-65c96ce820f2 service nova] Lock "070a834d-6478-4705-8df0-2a27c8780507-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 937.011053] env[63345]: DEBUG nova.compute.manager [req-f1d7727d-1244-4b39-b5e9-4cfd7198a94e req-c777d141-a615-49b3-98f5-65c96ce820f2 service nova] [instance: 070a834d-6478-4705-8df0-2a27c8780507] No waiting events found dispatching network-vif-plugged-cf06de95-5747-4226-b66c-b9ccca47321d {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 937.011053] env[63345]: 
WARNING nova.compute.manager [req-f1d7727d-1244-4b39-b5e9-4cfd7198a94e req-c777d141-a615-49b3-98f5-65c96ce820f2 service nova] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Received unexpected event network-vif-plugged-cf06de95-5747-4226-b66c-b9ccca47321d for instance with vm_state shelved_offloaded and task_state spawning. [ 937.011053] env[63345]: DEBUG nova.compute.manager [req-f1d7727d-1244-4b39-b5e9-4cfd7198a94e req-c777d141-a615-49b3-98f5-65c96ce820f2 service nova] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Received event network-changed-cf06de95-5747-4226-b66c-b9ccca47321d {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 937.011291] env[63345]: DEBUG nova.compute.manager [req-f1d7727d-1244-4b39-b5e9-4cfd7198a94e req-c777d141-a615-49b3-98f5-65c96ce820f2 service nova] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Refreshing instance network info cache due to event network-changed-cf06de95-5747-4226-b66c-b9ccca47321d. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 937.011565] env[63345]: DEBUG oslo_concurrency.lockutils [req-f1d7727d-1244-4b39-b5e9-4cfd7198a94e req-c777d141-a615-49b3-98f5-65c96ce820f2 service nova] Acquiring lock "refresh_cache-070a834d-6478-4705-8df0-2a27c8780507" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 937.011778] env[63345]: DEBUG oslo_concurrency.lockutils [req-f1d7727d-1244-4b39-b5e9-4cfd7198a94e req-c777d141-a615-49b3-98f5-65c96ce820f2 service nova] Acquired lock "refresh_cache-070a834d-6478-4705-8df0-2a27c8780507" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 937.012013] env[63345]: DEBUG nova.network.neutron [req-f1d7727d-1244-4b39-b5e9-4cfd7198a94e req-c777d141-a615-49b3-98f5-65c96ce820f2 service nova] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Refreshing network info cache for port cf06de95-5747-4226-b66c-b9ccca47321d {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 937.081832] env[63345]: DEBUG nova.compute.manager [None req-b1aba700-aa3f-4db6-97fd-5ef1a47430df tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: e5546a26-3f94-48a6-914a-2c37e63a0aeb] Start destroying the instance on the hypervisor. {{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 937.082364] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1aba700-aa3f-4db6-97fd-5ef1a47430df tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: e5546a26-3f94-48a6-914a-2c37e63a0aeb] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 937.083097] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-92454cb7-7270-4fd6-84aa-79e472d2f284 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.092321] env[63345]: DEBUG oslo_vmware.api [None req-b1aba700-aa3f-4db6-97fd-5ef1a47430df tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for the task: (returnval){ [ 937.092321] env[63345]: value = "task-1017480" [ 937.092321] env[63345]: _type = "Task" [ 937.092321] env[63345]: } to complete. 
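The warning above is the external-event dispatch path: Neutron reports network-vif-plugged, no waiter is registered for the unshelving instance, so the event is logged as unexpected instead of being handed to a waiter. A sketch of that registry/dispatch pattern (Nova's real implementation is nova.compute.manager.InstanceEvents):

    import threading

    class InstanceEvents:
        def __init__(self):
            self._waiters = {}      # (instance_uuid, event_name) -> threading.Event
            self._lock = threading.Lock()

        def prepare_for_event(self, instance_uuid, event_name):
            waiter = threading.Event()
            with self._lock:
                self._waiters[(instance_uuid, event_name)] = waiter
            return waiter

        def pop_instance_event(self, instance_uuid, event_name):
            with self._lock:
                return self._waiters.pop((instance_uuid, event_name), None)

    def dispatch(events, instance_uuid, event_name, vm_state, task_state):
        waiter = events.pop_instance_event(instance_uuid, event_name)
        if waiter is None:
            print(f"WARNING: unexpected event {event_name} for instance "
                  f"with vm_state {vm_state} and task_state {task_state}")
        else:
            waiter.set()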
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.104024] env[63345]: DEBUG oslo_vmware.api [None req-b1aba700-aa3f-4db6-97fd-5ef1a47430df tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017480, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.136957] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017478, 'name': CreateVM_Task, 'duration_secs': 0.680255} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.138095] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 937.138095] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/163b8ba5-dbfa-4890-b990-7e227e0ccf91" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 937.138095] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Acquired lock "[datastore2] devstack-image-cache_base/163b8ba5-dbfa-4890-b990-7e227e0ccf91" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 937.138353] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/163b8ba5-dbfa-4890-b990-7e227e0ccf91" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 937.141228] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cb23f4c6-71d2-4e33-9e7f-8abfc65bd050 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.147100] env[63345]: DEBUG oslo_vmware.api [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Waiting for the task: (returnval){ [ 937.147100] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52ad6b6e-4ccb-4560-3f8e-b52773e064e7" [ 937.147100] env[63345]: _type = "Task" [ 937.147100] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.156080] env[63345]: DEBUG oslo_vmware.api [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52ad6b6e-4ccb-4560-3f8e-b52773e064e7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.180294] env[63345]: DEBUG oslo_vmware.api [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017479, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.651399} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.180780] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 726332dd-8699-49a4-a9ea-b9cbfc159855/726332dd-8699-49a4-a9ea-b9cbfc159855.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 937.182197] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 937.182197] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-171d3e2d-6a46-484a-a582-ebd9b6a494f1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.189730] env[63345]: DEBUG oslo_vmware.api [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 937.189730] env[63345]: value = "task-1017481" [ 937.189730] env[63345]: _type = "Task" [ 937.189730] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.199596] env[63345]: DEBUG oslo_vmware.api [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017481, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.391026] env[63345]: DEBUG nova.scheduler.client.report [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 937.548018] env[63345]: DEBUG nova.network.neutron [req-ef7ddbcf-5f61-44a5-8d89-05f854e2a0ff req-b4f18310-f384-4e58-8145-9492095e0cc9 service nova] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Updated VIF entry in instance network info cache for port 114e38e0-a558-4242-ad5b-4aac063dcb72. {{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 937.548018] env[63345]: DEBUG nova.network.neutron [req-ef7ddbcf-5f61-44a5-8d89-05f854e2a0ff req-b4f18310-f384-4e58-8145-9492095e0cc9 service nova] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Updating instance_info_cache with network_info: [{"id": "114e38e0-a558-4242-ad5b-4aac063dcb72", "address": "fa:16:3e:bb:2c:f8", "network": {"id": "dffa0b34-9323-42eb-aeb1-e32aebcb75c8", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1826417035-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "57e386920081487583ea143003aca8c4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "94e1d797-8eb2-4400-9f7d-f2eb60eb4cf2", "external-id": "nsx-vlan-transportzone-828", "segmentation_id": 828, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap114e38e0-a5", "ovs_interfaceid": "114e38e0-a558-4242-ad5b-4aac063dcb72", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 937.603286] env[63345]: DEBUG oslo_vmware.api [None req-b1aba700-aa3f-4db6-97fd-5ef1a47430df tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017480, 'name': PowerOffVM_Task, 'duration_secs': 0.443914} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.603562] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1aba700-aa3f-4db6-97fd-5ef1a47430df tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: e5546a26-3f94-48a6-914a-2c37e63a0aeb] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 937.603768] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1aba700-aa3f-4db6-97fd-5ef1a47430df tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: e5546a26-3f94-48a6-914a-2c37e63a0aeb] Volume detach. Driver type: vmdk {{(pid=63345) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 937.603969] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1aba700-aa3f-4db6-97fd-5ef1a47430df tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: e5546a26-3f94-48a6-914a-2c37e63a0aeb] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-226105', 'volume_id': 'ce489aa2-8e07-4edd-b43a-7068ef1635be', 'name': 'volume-ce489aa2-8e07-4edd-b43a-7068ef1635be', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e5546a26-3f94-48a6-914a-2c37e63a0aeb', 'attached_at': '', 'detached_at': '', 'volume_id': 'ce489aa2-8e07-4edd-b43a-7068ef1635be', 'serial': 'ce489aa2-8e07-4edd-b43a-7068ef1635be'} {{(pid=63345) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 937.604803] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94053364-b97f-4c28-95fb-2e4f7a52ddbe {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.627897] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-342140cf-675c-454e-b186-17732d43f71e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.636282] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-495e383a-9e25-4c21-8434-18a80f97081d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.667295] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8e691de-5a01-4d76-bb0e-044f89de7bbb {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.689958] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Releasing lock "[datastore2] devstack-image-cache_base/163b8ba5-dbfa-4890-b990-7e227e0ccf91" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 937.689958] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Processing image 163b8ba5-dbfa-4890-b990-7e227e0ccf91 {{(pid=63345) _fetch_image_if_missing 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 937.689958] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/163b8ba5-dbfa-4890-b990-7e227e0ccf91/163b8ba5-dbfa-4890-b990-7e227e0ccf91.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 937.689958] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Acquired lock "[datastore2] devstack-image-cache_base/163b8ba5-dbfa-4890-b990-7e227e0ccf91/163b8ba5-dbfa-4890-b990-7e227e0ccf91.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 937.690461] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 937.690679] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1aba700-aa3f-4db6-97fd-5ef1a47430df tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] The volume has not been displaced from its original location: [datastore2] volume-ce489aa2-8e07-4edd-b43a-7068ef1635be/volume-ce489aa2-8e07-4edd-b43a-7068ef1635be.vmdk. No consolidation needed. {{(pid=63345) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 937.697055] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1aba700-aa3f-4db6-97fd-5ef1a47430df tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: e5546a26-3f94-48a6-914a-2c37e63a0aeb] Reconfiguring VM instance instance-00000057 to detach disk 2001 {{(pid=63345) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 937.699911] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d623f77d-c271-4aec-ae7e-f04c098528a4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.702365] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c1fcf7e6-ea1a-4aa2-914a-c08ecfa91b6d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.729886] env[63345]: DEBUG oslo_vmware.api [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017481, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.242114} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.731515] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 937.731958] env[63345]: DEBUG oslo_vmware.api [None req-b1aba700-aa3f-4db6-97fd-5ef1a47430df tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for the task: (returnval){ [ 937.731958] env[63345]: value = "task-1017482" [ 937.731958] env[63345]: _type = "Task" [ 937.731958] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.733909] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dfe222a-7d95-4631-af8a-32e1b00e403e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.739958] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 937.739958] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 937.743561] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4b95737c-f5eb-4610-aed9-699377f75659 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.767329] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Reconfiguring VM instance instance-00000059 to attach disk [datastore2] 726332dd-8699-49a4-a9ea-b9cbfc159855/726332dd-8699-49a4-a9ea-b9cbfc159855.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 937.774157] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4e8b24cc-82e6-4ede-ad09-18263debbfd3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.789796] env[63345]: DEBUG oslo_vmware.api [None req-b1aba700-aa3f-4db6-97fd-5ef1a47430df tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017482, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.790765] env[63345]: DEBUG oslo_vmware.api [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Waiting for the task: (returnval){ [ 937.790765] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52f0b67f-6a13-4f23-19d5-20ab52d67d4d" [ 937.790765] env[63345]: _type = "Task" [ 937.790765] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.796637] env[63345]: DEBUG oslo_vmware.api [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 937.796637] env[63345]: value = "task-1017483" [ 937.796637] env[63345]: _type = "Task" [ 937.796637] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.803846] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Preparing fetch location {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 937.804030] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Fetch image to [datastore2] OSTACK_IMG_9b96f675-5113-4f67-bbeb-c8c60557a61d/OSTACK_IMG_9b96f675-5113-4f67-bbeb-c8c60557a61d.vmdk {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 937.804108] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Downloading stream optimized image 163b8ba5-dbfa-4890-b990-7e227e0ccf91 to [datastore2] OSTACK_IMG_9b96f675-5113-4f67-bbeb-c8c60557a61d/OSTACK_IMG_9b96f675-5113-4f67-bbeb-c8c60557a61d.vmdk on the data store datastore2 as vApp {{(pid=63345) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 937.804288] env[63345]: DEBUG nova.virt.vmwareapi.images [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Downloading image file data 163b8ba5-dbfa-4890-b990-7e227e0ccf91 to the ESX as VM named 'OSTACK_IMG_9b96f675-5113-4f67-bbeb-c8c60557a61d' {{(pid=63345) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 937.812372] env[63345]: DEBUG oslo_vmware.api [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017483, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.897529] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.533s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 937.900540] env[63345]: DEBUG oslo_concurrency.lockutils [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.461s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 937.901534] env[63345]: INFO nova.compute.claims [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: a0eb9dae-0d27-419f-9210-eaa445e564c8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 937.910494] env[63345]: DEBUG oslo_vmware.rw_handles [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 937.910494] env[63345]: value = "resgroup-9" [ 937.910494] env[63345]: _type = "ResourcePool" [ 937.910494] env[63345]: }. {{(pid=63345) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 937.910847] env[63345]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-f877d02e-caee-4798-91f7-564d19ecc957 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.934968] env[63345]: DEBUG oslo_vmware.rw_handles [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Lease: (returnval){ [ 937.934968] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52fca3e8-e5ad-0616-9424-c987940b317c" [ 937.934968] env[63345]: _type = "HttpNfcLease" [ 937.934968] env[63345]: } obtained for vApp import into resource pool (val){ [ 937.934968] env[63345]: value = "resgroup-9" [ 937.934968] env[63345]: _type = "ResourcePool" [ 937.934968] env[63345]: }. {{(pid=63345) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 937.935443] env[63345]: DEBUG oslo_vmware.api [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Waiting for the lease: (returnval){ [ 937.935443] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52fca3e8-e5ad-0616-9424-c987940b317c" [ 937.935443] env[63345]: _type = "HttpNfcLease" [ 937.935443] env[63345]: } to be ready. 
{{(pid=63345) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 937.943266] env[63345]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 937.943266] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52fca3e8-e5ad-0616-9424-c987940b317c" [ 937.943266] env[63345]: _type = "HttpNfcLease" [ 937.943266] env[63345]: } is initializing. {{(pid=63345) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 937.985715] env[63345]: INFO nova.network.neutron [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Updating port 025d1e18-19a3-43ce-9db9-1590137a5544 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 938.024974] env[63345]: DEBUG nova.network.neutron [req-f1d7727d-1244-4b39-b5e9-4cfd7198a94e req-c777d141-a615-49b3-98f5-65c96ce820f2 service nova] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Updated VIF entry in instance network info cache for port cf06de95-5747-4226-b66c-b9ccca47321d. {{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 938.025425] env[63345]: DEBUG nova.network.neutron [req-f1d7727d-1244-4b39-b5e9-4cfd7198a94e req-c777d141-a615-49b3-98f5-65c96ce820f2 service nova] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Updating instance_info_cache with network_info: [{"id": "cf06de95-5747-4226-b66c-b9ccca47321d", "address": "fa:16:3e:9e:46:44", "network": {"id": "403ac06e-e45e-4215-bf0c-16ddd583ddc5", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1349318740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ac5c2a653dae436c97514507939c4e3c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e41c97-4d75-4041-ae71-321e7e9d480b", "external-id": "nsx-vlan-transportzone-483", "segmentation_id": 483, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf06de95-57", "ovs_interfaceid": "cf06de95-5747-4226-b66c-b9ccca47321d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 938.053768] env[63345]: DEBUG oslo_concurrency.lockutils [req-ef7ddbcf-5f61-44a5-8d89-05f854e2a0ff req-b4f18310-f384-4e58-8145-9492095e0cc9 service nova] Releasing lock "refresh_cache-726332dd-8699-49a4-a9ea-b9cbfc159855" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 938.217197] env[63345]: DEBUG nova.network.neutron [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 49cf9c08-4024-40aa-9370-7b4f8d89e2cf] Successfully updated port: 065f6e50-8edf-4eac-a2e3-d944aa6f33ba {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 938.255088] env[63345]: DEBUG oslo_vmware.api [None 
req-b1aba700-aa3f-4db6-97fd-5ef1a47430df tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017482, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.315012] env[63345]: DEBUG oslo_vmware.api [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017483, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.444348] env[63345]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 938.444348] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52fca3e8-e5ad-0616-9424-c987940b317c" [ 938.444348] env[63345]: _type = "HttpNfcLease" [ 938.444348] env[63345]: } is initializing. {{(pid=63345) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 938.528360] env[63345]: DEBUG oslo_concurrency.lockutils [req-f1d7727d-1244-4b39-b5e9-4cfd7198a94e req-c777d141-a615-49b3-98f5-65c96ce820f2 service nova] Releasing lock "refresh_cache-070a834d-6478-4705-8df0-2a27c8780507" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 938.529256] env[63345]: DEBUG nova.compute.manager [req-f1d7727d-1244-4b39-b5e9-4cfd7198a94e req-c777d141-a615-49b3-98f5-65c96ce820f2 service nova] [instance: dde93fd5-6312-4d91-b041-b7fc84b207d3] Received event network-vif-deleted-56e0ca56-d9a3-439a-b072-ad4f8da026e8 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 938.721938] env[63345]: DEBUG oslo_concurrency.lockutils [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquiring lock "refresh_cache-49cf9c08-4024-40aa-9370-7b4f8d89e2cf" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 938.722046] env[63345]: DEBUG oslo_concurrency.lockutils [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquired lock "refresh_cache-49cf9c08-4024-40aa-9370-7b4f8d89e2cf" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 938.722209] env[63345]: DEBUG nova.network.neutron [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 49cf9c08-4024-40aa-9370-7b4f8d89e2cf] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 938.753422] env[63345]: DEBUG oslo_vmware.api [None req-b1aba700-aa3f-4db6-97fd-5ef1a47430df tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017482, 'name': ReconfigVM_Task, 'duration_secs': 0.604684} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.753776] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1aba700-aa3f-4db6-97fd-5ef1a47430df tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: e5546a26-3f94-48a6-914a-2c37e63a0aeb] Reconfigured VM instance instance-00000057 to detach disk 2001 {{(pid=63345) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 938.758465] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3e324ac8-caa2-426c-b472-7d4a330887a4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.779748] env[63345]: DEBUG oslo_vmware.api [None req-b1aba700-aa3f-4db6-97fd-5ef1a47430df tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for the task: (returnval){ [ 938.779748] env[63345]: value = "task-1017485" [ 938.779748] env[63345]: _type = "Task" [ 938.779748] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.790249] env[63345]: DEBUG oslo_vmware.api [None req-b1aba700-aa3f-4db6-97fd-5ef1a47430df tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017485, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.811929] env[63345]: DEBUG oslo_vmware.api [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017483, 'name': ReconfigVM_Task, 'duration_secs': 0.607006} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.812267] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Reconfigured VM instance instance-00000059 to attach disk [datastore2] 726332dd-8699-49a4-a9ea-b9cbfc159855/726332dd-8699-49a4-a9ea-b9cbfc159855.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 938.812910] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fd7e978d-b237-44a5-a301-f179a87f6e9a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.818766] env[63345]: DEBUG oslo_vmware.api [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 938.818766] env[63345]: value = "task-1017486" [ 938.818766] env[63345]: _type = "Task" [ 938.818766] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.830897] env[63345]: DEBUG oslo_vmware.api [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017486, 'name': Rename_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.946669] env[63345]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 938.946669] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52fca3e8-e5ad-0616-9424-c987940b317c" [ 938.946669] env[63345]: _type = "HttpNfcLease" [ 938.946669] env[63345]: } is ready. {{(pid=63345) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 938.946669] env[63345]: DEBUG oslo_vmware.rw_handles [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 938.946669] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52fca3e8-e5ad-0616-9424-c987940b317c" [ 938.946669] env[63345]: _type = "HttpNfcLease" [ 938.946669] env[63345]: }. {{(pid=63345) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 938.947373] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-230b79b0-9ef4-4af0-af0a-48bdcfac0a35 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.960702] env[63345]: DEBUG oslo_vmware.rw_handles [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5234aa2c-564c-8c15-e09c-b866a194e758/disk-0.vmdk from lease info. {{(pid=63345) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 938.961180] env[63345]: DEBUG oslo_vmware.rw_handles [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Creating HTTP connection to write to file with size = 31669760 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5234aa2c-564c-8c15-e09c-b866a194e758/disk-0.vmdk. 
{{(pid=63345) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 939.029882] env[63345]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-d4f22193-8cae-48c3-bc4c-2f79084b6149 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.097592] env[63345]: DEBUG oslo_concurrency.lockutils [None req-80f120a0-d136-4dad-a2ea-97acb42a180b tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquiring lock "interface-0da64b45-fa00-4fe8-8d1d-df586f27743f-2b931f56-815d-48ec-915d-c68e2ae0333f" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 939.097592] env[63345]: DEBUG oslo_concurrency.lockutils [None req-80f120a0-d136-4dad-a2ea-97acb42a180b tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Lock "interface-0da64b45-fa00-4fe8-8d1d-df586f27743f-2b931f56-815d-48ec-915d-c68e2ae0333f" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 939.272141] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab005e60-1885-4234-a00f-962e8b75303f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.280218] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34a5c48d-001a-417b-87fe-dfa5527af1c4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.291824] env[63345]: DEBUG oslo_vmware.api [None req-b1aba700-aa3f-4db6-97fd-5ef1a47430df tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017485, 'name': ReconfigVM_Task, 'duration_secs': 0.134581} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.316217] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1aba700-aa3f-4db6-97fd-5ef1a47430df tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: e5546a26-3f94-48a6-914a-2c37e63a0aeb] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-226105', 'volume_id': 'ce489aa2-8e07-4edd-b43a-7068ef1635be', 'name': 'volume-ce489aa2-8e07-4edd-b43a-7068ef1635be', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e5546a26-3f94-48a6-914a-2c37e63a0aeb', 'attached_at': '', 'detached_at': '', 'volume_id': 'ce489aa2-8e07-4edd-b43a-7068ef1635be', 'serial': 'ce489aa2-8e07-4edd-b43a-7068ef1635be'} {{(pid=63345) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 939.316603] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-b1aba700-aa3f-4db6-97fd-5ef1a47430df tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: e5546a26-3f94-48a6-914a-2c37e63a0aeb] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 939.319658] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-098e3201-7e18-44f6-b07d-4e185f7cf765 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.323298] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c91cec5-10c7-4ca7-b2e7-8d12c6944efd {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.336603] env[63345]: DEBUG oslo_vmware.api [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017486, 'name': Rename_Task, 'duration_secs': 0.150094} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.340556] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 939.340851] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-b1aba700-aa3f-4db6-97fd-5ef1a47430df tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: e5546a26-3f94-48a6-914a-2c37e63a0aeb] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 939.341116] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c143c20e-a6dd-4240-86d5-c9b117f07b67 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.343160] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-436e4e9f-9383-494d-a0d7-f3023acd440a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.346693] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-618dccf3-e615-4a32-8bcf-12636ee93fee {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.363405] env[63345]: DEBUG nova.compute.provider_tree [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 939.366028] env[63345]: DEBUG oslo_vmware.api [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 939.366028] env[63345]: value = "task-1017487" [ 939.366028] env[63345]: _type = "Task" [ 939.366028] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.375523] env[63345]: DEBUG oslo_vmware.api [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017487, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.440717] env[63345]: DEBUG nova.compute.manager [req-61332429-725b-4ac5-827d-5c83fa696fa1 req-43845fd0-cd7f-49b4-b064-9657e429d3bb service nova] [instance: 49cf9c08-4024-40aa-9370-7b4f8d89e2cf] Received event network-vif-plugged-065f6e50-8edf-4eac-a2e3-d944aa6f33ba {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 939.440772] env[63345]: DEBUG oslo_concurrency.lockutils [req-61332429-725b-4ac5-827d-5c83fa696fa1 req-43845fd0-cd7f-49b4-b064-9657e429d3bb service nova] Acquiring lock "49cf9c08-4024-40aa-9370-7b4f8d89e2cf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 939.440968] env[63345]: DEBUG oslo_concurrency.lockutils [req-61332429-725b-4ac5-827d-5c83fa696fa1 req-43845fd0-cd7f-49b4-b064-9657e429d3bb service nova] Lock "49cf9c08-4024-40aa-9370-7b4f8d89e2cf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 939.442046] env[63345]: DEBUG oslo_concurrency.lockutils [req-61332429-725b-4ac5-827d-5c83fa696fa1 req-43845fd0-cd7f-49b4-b064-9657e429d3bb service nova] Lock "49cf9c08-4024-40aa-9370-7b4f8d89e2cf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 939.442046] env[63345]: DEBUG nova.compute.manager [req-61332429-725b-4ac5-827d-5c83fa696fa1 req-43845fd0-cd7f-49b4-b064-9657e429d3bb service nova] [instance: 49cf9c08-4024-40aa-9370-7b4f8d89e2cf] No waiting events found dispatching network-vif-plugged-065f6e50-8edf-4eac-a2e3-d944aa6f33ba {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 939.442046] env[63345]: WARNING nova.compute.manager [req-61332429-725b-4ac5-827d-5c83fa696fa1 req-43845fd0-cd7f-49b4-b064-9657e429d3bb service nova] [instance: 49cf9c08-4024-40aa-9370-7b4f8d89e2cf] Received unexpected event network-vif-plugged-065f6e50-8edf-4eac-a2e3-d944aa6f33ba for instance with vm_state building and task_state spawning. [ 939.442046] env[63345]: DEBUG nova.compute.manager [req-61332429-725b-4ac5-827d-5c83fa696fa1 req-43845fd0-cd7f-49b4-b064-9657e429d3bb service nova] [instance: 49cf9c08-4024-40aa-9370-7b4f8d89e2cf] Received event network-changed-065f6e50-8edf-4eac-a2e3-d944aa6f33ba {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 939.442046] env[63345]: DEBUG nova.compute.manager [req-61332429-725b-4ac5-827d-5c83fa696fa1 req-43845fd0-cd7f-49b4-b064-9657e429d3bb service nova] [instance: 49cf9c08-4024-40aa-9370-7b4f8d89e2cf] Refreshing instance network info cache due to event network-changed-065f6e50-8edf-4eac-a2e3-d944aa6f33ba. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 939.442046] env[63345]: DEBUG oslo_concurrency.lockutils [req-61332429-725b-4ac5-827d-5c83fa696fa1 req-43845fd0-cd7f-49b4-b064-9657e429d3bb service nova] Acquiring lock "refresh_cache-49cf9c08-4024-40aa-9370-7b4f8d89e2cf" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 939.449157] env[63345]: DEBUG nova.network.neutron [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 49cf9c08-4024-40aa-9370-7b4f8d89e2cf] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 939.453518] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-b1aba700-aa3f-4db6-97fd-5ef1a47430df tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: e5546a26-3f94-48a6-914a-2c37e63a0aeb] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 939.453856] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-b1aba700-aa3f-4db6-97fd-5ef1a47430df tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: e5546a26-3f94-48a6-914a-2c37e63a0aeb] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 939.454085] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1aba700-aa3f-4db6-97fd-5ef1a47430df tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Deleting the datastore file [datastore2] e5546a26-3f94-48a6-914a-2c37e63a0aeb {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 939.454360] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-42848291-ec45-4783-b515-51d8b330e5e4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.461341] env[63345]: DEBUG oslo_vmware.api [None req-b1aba700-aa3f-4db6-97fd-5ef1a47430df tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for the task: (returnval){ [ 939.461341] env[63345]: value = "task-1017489" [ 939.461341] env[63345]: _type = "Task" [ 939.461341] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.469928] env[63345]: DEBUG oslo_vmware.api [None req-b1aba700-aa3f-4db6-97fd-5ef1a47430df tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017489, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.600992] env[63345]: DEBUG oslo_concurrency.lockutils [None req-80f120a0-d136-4dad-a2ea-97acb42a180b tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquiring lock "0da64b45-fa00-4fe8-8d1d-df586f27743f" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 939.604515] env[63345]: DEBUG oslo_concurrency.lockutils [None req-80f120a0-d136-4dad-a2ea-97acb42a180b tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquired lock "0da64b45-fa00-4fe8-8d1d-df586f27743f" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 939.604515] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-563725e3-c3e0-4e3a-9b55-7c0cc23f329a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.633975] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5385df70-e46d-48d2-a9f0-41e4b4fa7752 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.668169] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-80f120a0-d136-4dad-a2ea-97acb42a180b tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Reconfiguring VM to detach interface {{(pid=63345) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1974}} [ 939.676704] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-10be851a-fa88-4e82-90b7-3155b151d058 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.702314] env[63345]: DEBUG oslo_vmware.api [None req-80f120a0-d136-4dad-a2ea-97acb42a180b tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Waiting for the task: (returnval){ [ 939.702314] env[63345]: value = "task-1017490" [ 939.702314] env[63345]: _type = "Task" [ 939.702314] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.718021] env[63345]: DEBUG oslo_vmware.api [None req-80f120a0-d136-4dad-a2ea-97acb42a180b tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017490, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.868099] env[63345]: DEBUG nova.scheduler.client.report [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 939.882559] env[63345]: DEBUG oslo_vmware.api [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017487, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.914738] env[63345]: DEBUG nova.network.neutron [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 49cf9c08-4024-40aa-9370-7b4f8d89e2cf] Updating instance_info_cache with network_info: [{"id": "065f6e50-8edf-4eac-a2e3-d944aa6f33ba", "address": "fa:16:3e:09:0b:f9", "network": {"id": "f05df594-fc76-4e2d-b29b-6942fee8dc99", "bridge": "br-int", "label": "tempest-ServersTestJSON-241206779-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63d7b3facae6416989f763e610cf98f7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap065f6e50-8e", "ovs_interfaceid": "065f6e50-8edf-4eac-a2e3-d944aa6f33ba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 939.972129] env[63345]: DEBUG oslo_vmware.api [None req-b1aba700-aa3f-4db6-97fd-5ef1a47430df tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017489, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.201138} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.973233] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1aba700-aa3f-4db6-97fd-5ef1a47430df tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 939.973233] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-b1aba700-aa3f-4db6-97fd-5ef1a47430df tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: e5546a26-3f94-48a6-914a-2c37e63a0aeb] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 939.973233] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-b1aba700-aa3f-4db6-97fd-5ef1a47430df tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: e5546a26-3f94-48a6-914a-2c37e63a0aeb] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 939.973233] env[63345]: INFO nova.compute.manager [None req-b1aba700-aa3f-4db6-97fd-5ef1a47430df tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: e5546a26-3f94-48a6-914a-2c37e63a0aeb] Took 2.89 seconds to destroy the instance on the hypervisor. [ 939.973233] env[63345]: DEBUG oslo.service.loopingcall [None req-b1aba700-aa3f-4db6-97fd-5ef1a47430df tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 939.973589] env[63345]: DEBUG nova.compute.manager [-] [instance: e5546a26-3f94-48a6-914a-2c37e63a0aeb] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 939.973589] env[63345]: DEBUG nova.network.neutron [-] [instance: e5546a26-3f94-48a6-914a-2c37e63a0aeb] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 940.214505] env[63345]: DEBUG oslo_vmware.api [None req-80f120a0-d136-4dad-a2ea-97acb42a180b tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017490, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.377543] env[63345]: DEBUG oslo_concurrency.lockutils [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.478s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 940.378275] env[63345]: DEBUG nova.compute.manager [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: a0eb9dae-0d27-419f-9210-eaa445e564c8] Start building networks asynchronously for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 940.382261] env[63345]: DEBUG oslo_concurrency.lockutils [None req-783e1b39-4458-4385-8bb3-3cd8ebb8adbb tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.992s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 940.382761] env[63345]: DEBUG nova.objects.instance [None req-783e1b39-4458-4385-8bb3-3cd8ebb8adbb tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Lazy-loading 'resources' on Instance uuid a85688b0-d68f-4370-bd95-dc9fb1d2c26a {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 940.401236] env[63345]: DEBUG oslo_vmware.api [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017487, 'name': PowerOnVM_Task, 'duration_secs': 0.529737} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.406837] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 940.407099] env[63345]: INFO nova.compute.manager [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Took 9.72 seconds to spawn the instance on the hypervisor. 
[ 940.407290] env[63345]: DEBUG nova.compute.manager [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 940.408293] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd04622d-5588-4f21-9589-73357140f2fd {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.426661] env[63345]: DEBUG oslo_concurrency.lockutils [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Releasing lock "refresh_cache-49cf9c08-4024-40aa-9370-7b4f8d89e2cf" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 940.427542] env[63345]: DEBUG nova.compute.manager [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 49cf9c08-4024-40aa-9370-7b4f8d89e2cf] Instance network_info: |[{"id": "065f6e50-8edf-4eac-a2e3-d944aa6f33ba", "address": "fa:16:3e:09:0b:f9", "network": {"id": "f05df594-fc76-4e2d-b29b-6942fee8dc99", "bridge": "br-int", "label": "tempest-ServersTestJSON-241206779-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63d7b3facae6416989f763e610cf98f7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap065f6e50-8e", "ovs_interfaceid": "065f6e50-8edf-4eac-a2e3-d944aa6f33ba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 940.429042] env[63345]: DEBUG oslo_concurrency.lockutils [req-61332429-725b-4ac5-827d-5c83fa696fa1 req-43845fd0-cd7f-49b4-b064-9657e429d3bb service nova] Acquired lock "refresh_cache-49cf9c08-4024-40aa-9370-7b4f8d89e2cf" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 940.429042] env[63345]: DEBUG nova.network.neutron [req-61332429-725b-4ac5-827d-5c83fa696fa1 req-43845fd0-cd7f-49b4-b064-9657e429d3bb service nova] [instance: 49cf9c08-4024-40aa-9370-7b4f8d89e2cf] Refreshing network info cache for port 065f6e50-8edf-4eac-a2e3-d944aa6f33ba {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 940.430507] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 49cf9c08-4024-40aa-9370-7b4f8d89e2cf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:09:0b:f9', 'network_ref': {'type': 'OpaqueNetwork', 
'network-id': '7874ee7f-20c7-4bd8-a750-ed489e9acc65', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '065f6e50-8edf-4eac-a2e3-d944aa6f33ba', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 940.439832] env[63345]: DEBUG oslo.service.loopingcall [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 940.443135] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 49cf9c08-4024-40aa-9370-7b4f8d89e2cf] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 940.443135] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1a4c4975-ffb2-4c80-83ce-e59f5bc1b654 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.468978] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 940.468978] env[63345]: value = "task-1017491" [ 940.468978] env[63345]: _type = "Task" [ 940.468978] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.486281] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017491, 'name': CreateVM_Task} progress is 6%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.525139] env[63345]: DEBUG oslo_vmware.rw_handles [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Completed reading data from the image iterator. {{(pid=63345) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 940.525139] env[63345]: DEBUG oslo_vmware.rw_handles [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5234aa2c-564c-8c15-e09c-b866a194e758/disk-0.vmdk. {{(pid=63345) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 940.525528] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6f135f5-51f7-44b9-8d7e-a690ef6d1d1f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.533286] env[63345]: DEBUG oslo_vmware.rw_handles [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5234aa2c-564c-8c15-e09c-b866a194e758/disk-0.vmdk is in state: ready. {{(pid=63345) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 940.533675] env[63345]: DEBUG oslo_vmware.rw_handles [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5234aa2c-564c-8c15-e09c-b866a194e758/disk-0.vmdk. 
{{(pid=63345) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 940.534056] env[63345]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-bfc9c31c-b21b-4225-9f5a-ad60b28ea142 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.715254] env[63345]: DEBUG oslo_vmware.api [None req-80f120a0-d136-4dad-a2ea-97acb42a180b tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017490, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.767933] env[63345]: DEBUG oslo_vmware.rw_handles [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5234aa2c-564c-8c15-e09c-b866a194e758/disk-0.vmdk. {{(pid=63345) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 940.768227] env[63345]: INFO nova.virt.vmwareapi.images [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Downloaded image file data 163b8ba5-dbfa-4890-b990-7e227e0ccf91 [ 940.769059] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93d2fa9c-c54e-4ac9-bd2b-d28d8eb5275d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.794897] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-17b3b22e-5773-4b37-8694-3b74a284ad00 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.822089] env[63345]: INFO nova.virt.vmwareapi.images [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] The imported VM was unregistered [ 940.823910] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Caching image {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 940.824171] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Creating directory with path [datastore2] devstack-image-cache_base/163b8ba5-dbfa-4890-b990-7e227e0ccf91 {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 940.824488] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dc154a3d-5a5f-4f7b-a8fd-02d7f40b5227 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.847140] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Created directory with path 
[datastore2] devstack-image-cache_base/163b8ba5-dbfa-4890-b990-7e227e0ccf91 {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 940.847140] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_9b96f675-5113-4f67-bbeb-c8c60557a61d/OSTACK_IMG_9b96f675-5113-4f67-bbeb-c8c60557a61d.vmdk to [datastore2] devstack-image-cache_base/163b8ba5-dbfa-4890-b990-7e227e0ccf91/163b8ba5-dbfa-4890-b990-7e227e0ccf91.vmdk. {{(pid=63345) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 940.847140] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-56a8590d-c3ae-4489-8f97-0c9c46dc49e7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.854207] env[63345]: DEBUG oslo_vmware.api [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Waiting for the task: (returnval){ [ 940.854207] env[63345]: value = "task-1017493" [ 940.854207] env[63345]: _type = "Task" [ 940.854207] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.864733] env[63345]: DEBUG oslo_vmware.api [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1017493, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.889432] env[63345]: DEBUG nova.compute.utils [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 940.897207] env[63345]: DEBUG nova.compute.manager [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: a0eb9dae-0d27-419f-9210-eaa445e564c8] Allocating IP information in the background. {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 940.897402] env[63345]: DEBUG nova.network.neutron [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: a0eb9dae-0d27-419f-9210-eaa445e564c8] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 940.938277] env[63345]: INFO nova.compute.manager [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Took 40.74 seconds to build instance. [ 940.983532] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017491, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.038477] env[63345]: DEBUG nova.policy [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '738e7097762c42d490a66c3d86af9635', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '41afa63287424a549133615eb390bac7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 941.162081] env[63345]: DEBUG nova.network.neutron [-] [instance: e5546a26-3f94-48a6-914a-2c37e63a0aeb] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 941.213946] env[63345]: DEBUG oslo_vmware.api [None req-80f120a0-d136-4dad-a2ea-97acb42a180b tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017490, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.248010] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f54a2578-cd97-4d0e-8b0c-d13e039d3ff7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.253676] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Acquiring lock "refresh_cache-9aa651b8-317d-4153-8c33-9df0a5d16115" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 941.253676] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Acquired lock "refresh_cache-9aa651b8-317d-4153-8c33-9df0a5d16115" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 941.253676] env[63345]: DEBUG nova.network.neutron [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 941.258584] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59c5ea87-5093-467c-aef5-45961229cb60 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.294170] env[63345]: DEBUG nova.network.neutron [req-61332429-725b-4ac5-827d-5c83fa696fa1 req-43845fd0-cd7f-49b4-b064-9657e429d3bb service nova] [instance: 49cf9c08-4024-40aa-9370-7b4f8d89e2cf] Updated VIF entry in instance network info cache for port 065f6e50-8edf-4eac-a2e3-d944aa6f33ba. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 941.294573] env[63345]: DEBUG nova.network.neutron [req-61332429-725b-4ac5-827d-5c83fa696fa1 req-43845fd0-cd7f-49b4-b064-9657e429d3bb service nova] [instance: 49cf9c08-4024-40aa-9370-7b4f8d89e2cf] Updating instance_info_cache with network_info: [{"id": "065f6e50-8edf-4eac-a2e3-d944aa6f33ba", "address": "fa:16:3e:09:0b:f9", "network": {"id": "f05df594-fc76-4e2d-b29b-6942fee8dc99", "bridge": "br-int", "label": "tempest-ServersTestJSON-241206779-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63d7b3facae6416989f763e610cf98f7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap065f6e50-8e", "ovs_interfaceid": "065f6e50-8edf-4eac-a2e3-d944aa6f33ba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 941.296362] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ec70b43-b626-491b-b28d-604c1bb8e5c3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.309576] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04fb2b21-c864-41e6-a543-071697a54f98 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.326741] env[63345]: DEBUG nova.compute.provider_tree [None req-783e1b39-4458-4385-8bb3-3cd8ebb8adbb tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 941.365821] env[63345]: DEBUG oslo_vmware.api [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1017493, 'name': MoveVirtualDisk_Task} progress is 9%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.398510] env[63345]: DEBUG nova.compute.manager [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: a0eb9dae-0d27-419f-9210-eaa445e564c8] Start building block device mappings for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 941.442042] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dd3af5ec-8121-4428-9d95-74176760e401 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lock "726332dd-8699-49a4-a9ea-b9cbfc159855" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 42.250s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 941.487769] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017491, 'name': CreateVM_Task, 'duration_secs': 0.524956} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.488117] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 49cf9c08-4024-40aa-9370-7b4f8d89e2cf] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 941.488921] env[63345]: DEBUG oslo_concurrency.lockutils [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 941.489247] env[63345]: DEBUG oslo_concurrency.lockutils [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 941.489602] env[63345]: DEBUG oslo_concurrency.lockutils [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 941.489931] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d31cc121-02ef-4b59-84fb-effb513c6ce3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.495071] env[63345]: DEBUG oslo_vmware.api [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Waiting for the task: (returnval){ [ 941.495071] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52b0cb45-e392-62cd-cdb9-06044e72b4ba" [ 941.495071] env[63345]: _type = "Task" [ 941.495071] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.503881] env[63345]: DEBUG oslo_vmware.api [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52b0cb45-e392-62cd-cdb9-06044e72b4ba, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.620850] env[63345]: DEBUG nova.compute.manager [req-4a02cd79-9d02-4da0-98b9-1c2e88b77973 req-d705df61-0b7e-47d0-b2f4-003d90f58665 service nova] [instance: e5546a26-3f94-48a6-914a-2c37e63a0aeb] Received event network-vif-deleted-08cd427f-e8d7-43d7-9746-27262c5bd8d2 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 941.620850] env[63345]: DEBUG nova.compute.manager [req-4a02cd79-9d02-4da0-98b9-1c2e88b77973 req-d705df61-0b7e-47d0-b2f4-003d90f58665 service nova] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Received event network-vif-plugged-025d1e18-19a3-43ce-9db9-1590137a5544 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 941.621008] env[63345]: DEBUG oslo_concurrency.lockutils [req-4a02cd79-9d02-4da0-98b9-1c2e88b77973 req-d705df61-0b7e-47d0-b2f4-003d90f58665 service nova] Acquiring lock "9aa651b8-317d-4153-8c33-9df0a5d16115-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 941.621489] env[63345]: DEBUG oslo_concurrency.lockutils [req-4a02cd79-9d02-4da0-98b9-1c2e88b77973 req-d705df61-0b7e-47d0-b2f4-003d90f58665 service nova] Lock "9aa651b8-317d-4153-8c33-9df0a5d16115-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 941.625020] env[63345]: DEBUG oslo_concurrency.lockutils [req-4a02cd79-9d02-4da0-98b9-1c2e88b77973 req-d705df61-0b7e-47d0-b2f4-003d90f58665 service nova] Lock "9aa651b8-317d-4153-8c33-9df0a5d16115-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 941.625020] env[63345]: DEBUG nova.compute.manager [req-4a02cd79-9d02-4da0-98b9-1c2e88b77973 req-d705df61-0b7e-47d0-b2f4-003d90f58665 service nova] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] No waiting events found dispatching network-vif-plugged-025d1e18-19a3-43ce-9db9-1590137a5544 {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 941.625020] env[63345]: WARNING nova.compute.manager [req-4a02cd79-9d02-4da0-98b9-1c2e88b77973 req-d705df61-0b7e-47d0-b2f4-003d90f58665 service nova] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Received unexpected event network-vif-plugged-025d1e18-19a3-43ce-9db9-1590137a5544 for instance with vm_state shelved_offloaded and task_state spawning. [ 941.625020] env[63345]: DEBUG nova.compute.manager [req-4a02cd79-9d02-4da0-98b9-1c2e88b77973 req-d705df61-0b7e-47d0-b2f4-003d90f58665 service nova] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Received event network-changed-025d1e18-19a3-43ce-9db9-1590137a5544 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 941.625020] env[63345]: DEBUG nova.compute.manager [req-4a02cd79-9d02-4da0-98b9-1c2e88b77973 req-d705df61-0b7e-47d0-b2f4-003d90f58665 service nova] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Refreshing instance network info cache due to event network-changed-025d1e18-19a3-43ce-9db9-1590137a5544. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 941.625020] env[63345]: DEBUG oslo_concurrency.lockutils [req-4a02cd79-9d02-4da0-98b9-1c2e88b77973 req-d705df61-0b7e-47d0-b2f4-003d90f58665 service nova] Acquiring lock "refresh_cache-9aa651b8-317d-4153-8c33-9df0a5d16115" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 941.665108] env[63345]: INFO nova.compute.manager [-] [instance: e5546a26-3f94-48a6-914a-2c37e63a0aeb] Took 1.69 seconds to deallocate network for instance. [ 941.718508] env[63345]: DEBUG oslo_vmware.api [None req-80f120a0-d136-4dad-a2ea-97acb42a180b tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017490, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.804028] env[63345]: DEBUG oslo_concurrency.lockutils [req-61332429-725b-4ac5-827d-5c83fa696fa1 req-43845fd0-cd7f-49b4-b064-9657e429d3bb service nova] Releasing lock "refresh_cache-49cf9c08-4024-40aa-9370-7b4f8d89e2cf" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 941.832271] env[63345]: DEBUG nova.scheduler.client.report [None req-783e1b39-4458-4385-8bb3-3cd8ebb8adbb tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 941.866451] env[63345]: DEBUG oslo_vmware.api [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1017493, 'name': MoveVirtualDisk_Task} progress is 26%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.015102] env[63345]: DEBUG oslo_vmware.api [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52b0cb45-e392-62cd-cdb9-06044e72b4ba, 'name': SearchDatastore_Task, 'duration_secs': 0.064614} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.015468] env[63345]: DEBUG oslo_concurrency.lockutils [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 942.015879] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 49cf9c08-4024-40aa-9370-7b4f8d89e2cf] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 942.016162] env[63345]: DEBUG oslo_concurrency.lockutils [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 942.016315] env[63345]: DEBUG oslo_concurrency.lockutils [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 942.016499] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 942.017910] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-887eeaf1-8a8a-4f81-b6e9-03898e681b14 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.037251] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 942.037518] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 942.038429] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dc387df9-e057-4036-874b-c5802672c7c4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.048095] env[63345]: DEBUG oslo_vmware.api [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Waiting for the task: (returnval){ [ 942.048095] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]5211340a-9ed7-c205-d21e-993423fbae2e" [ 942.048095] env[63345]: _type = "Task" [ 942.048095] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.061396] env[63345]: DEBUG oslo_vmware.api [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5211340a-9ed7-c205-d21e-993423fbae2e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.096170] env[63345]: DEBUG nova.network.neutron [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Updating instance_info_cache with network_info: [{"id": "025d1e18-19a3-43ce-9db9-1590137a5544", "address": "fa:16:3e:9b:36:a9", "network": {"id": "95d95c9b-b21c-4ee5-ab54-d0bf2699d38e", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-88421441-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba08f64c26d245a8b8f2b52ea97c2f1a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7043ca7a-807c-4c7b-b646-23ffece188b2", "external-id": "nsx-vlan-transportzone-619", "segmentation_id": 619, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap025d1e18-19", "ovs_interfaceid": "025d1e18-19a3-43ce-9db9-1590137a5544", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 942.225287] env[63345]: DEBUG oslo_vmware.api [None req-80f120a0-d136-4dad-a2ea-97acb42a180b tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017490, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.228557] env[63345]: INFO nova.compute.manager [None req-b1aba700-aa3f-4db6-97fd-5ef1a47430df tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: e5546a26-3f94-48a6-914a-2c37e63a0aeb] Took 0.56 seconds to detach 1 volumes for instance. [ 942.284363] env[63345]: DEBUG nova.compute.manager [req-ec8724fa-c9ab-463e-9e4c-3493cf576506 req-71c93a74-e4ba-4fa8-b8c3-2d626657a0f2 service nova] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Received event network-changed-114e38e0-a558-4242-ad5b-4aac063dcb72 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 942.284638] env[63345]: DEBUG nova.compute.manager [req-ec8724fa-c9ab-463e-9e4c-3493cf576506 req-71c93a74-e4ba-4fa8-b8c3-2d626657a0f2 service nova] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Refreshing instance network info cache due to event network-changed-114e38e0-a558-4242-ad5b-4aac063dcb72. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 942.284781] env[63345]: DEBUG oslo_concurrency.lockutils [req-ec8724fa-c9ab-463e-9e4c-3493cf576506 req-71c93a74-e4ba-4fa8-b8c3-2d626657a0f2 service nova] Acquiring lock "refresh_cache-726332dd-8699-49a4-a9ea-b9cbfc159855" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 942.284957] env[63345]: DEBUG oslo_concurrency.lockutils [req-ec8724fa-c9ab-463e-9e4c-3493cf576506 req-71c93a74-e4ba-4fa8-b8c3-2d626657a0f2 service nova] Acquired lock "refresh_cache-726332dd-8699-49a4-a9ea-b9cbfc159855" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 942.286930] env[63345]: DEBUG nova.network.neutron [req-ec8724fa-c9ab-463e-9e4c-3493cf576506 req-71c93a74-e4ba-4fa8-b8c3-2d626657a0f2 service nova] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Refreshing network info cache for port 114e38e0-a558-4242-ad5b-4aac063dcb72 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 942.320620] env[63345]: DEBUG nova.network.neutron [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: a0eb9dae-0d27-419f-9210-eaa445e564c8] Successfully created port: bbb77ecb-8d67-4f41-9505-2571225c8480 {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 942.335251] env[63345]: DEBUG oslo_concurrency.lockutils [None req-783e1b39-4458-4385-8bb3-3cd8ebb8adbb tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.953s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 942.337903] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8ded89f7-36e8-4b1f-bb8f-eb5b0d30d69a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.990s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 942.339246] env[63345]: DEBUG nova.objects.instance [None req-8ded89f7-36e8-4b1f-bb8f-eb5b0d30d69a 
tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Lazy-loading 'resources' on Instance uuid 017a06b3-cc1a-4822-a07f-ca881fd4254b {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 942.365279] env[63345]: INFO nova.scheduler.client.report [None req-783e1b39-4458-4385-8bb3-3cd8ebb8adbb tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Deleted allocations for instance a85688b0-d68f-4370-bd95-dc9fb1d2c26a [ 942.376937] env[63345]: DEBUG oslo_vmware.api [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1017493, 'name': MoveVirtualDisk_Task} progress is 46%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.409360] env[63345]: DEBUG nova.compute.manager [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: a0eb9dae-0d27-419f-9210-eaa445e564c8] Start spawning the instance on the hypervisor. {{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 942.447620] env[63345]: DEBUG nova.virt.hardware [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 942.447917] env[63345]: DEBUG nova.virt.hardware [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 942.448452] env[63345]: DEBUG nova.virt.hardware [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 942.448767] env[63345]: DEBUG nova.virt.hardware [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 942.449010] env[63345]: DEBUG nova.virt.hardware [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Image pref 0:0:0 {{(pid=63345) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 942.451027] env[63345]: DEBUG nova.virt.hardware [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 942.451027] env[63345]: DEBUG nova.virt.hardware [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 942.451027] env[63345]: DEBUG nova.virt.hardware [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 942.451027] env[63345]: DEBUG nova.virt.hardware [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 942.451243] env[63345]: DEBUG nova.virt.hardware [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 942.451495] env[63345]: DEBUG nova.virt.hardware [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 942.452833] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1651a287-91ac-4299-a435-e315ac0f1ee0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.463888] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08caf0a6-bc4d-4a6b-a9bc-ded5cd5ea8ac {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.563364] env[63345]: DEBUG oslo_vmware.api [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5211340a-9ed7-c205-d21e-993423fbae2e, 'name': SearchDatastore_Task, 'duration_secs': 0.074188} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.563364] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c76a5d57-f2d7-4829-b902-40405b6b3324 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.573065] env[63345]: DEBUG oslo_vmware.api [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Waiting for the task: (returnval){ [ 942.573065] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]5293d543-37a8-1fcf-be13-32082f13f212" [ 942.573065] env[63345]: _type = "Task" [ 942.573065] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.592712] env[63345]: DEBUG oslo_vmware.api [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5293d543-37a8-1fcf-be13-32082f13f212, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.599603] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Releasing lock "refresh_cache-9aa651b8-317d-4153-8c33-9df0a5d16115" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 942.604811] env[63345]: DEBUG oslo_concurrency.lockutils [req-4a02cd79-9d02-4da0-98b9-1c2e88b77973 req-d705df61-0b7e-47d0-b2f4-003d90f58665 service nova] Acquired lock "refresh_cache-9aa651b8-317d-4153-8c33-9df0a5d16115" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 942.604811] env[63345]: DEBUG nova.network.neutron [req-4a02cd79-9d02-4da0-98b9-1c2e88b77973 req-d705df61-0b7e-47d0-b2f4-003d90f58665 service nova] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Refreshing network info cache for port 025d1e18-19a3-43ce-9db9-1590137a5544 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 942.625546] env[63345]: DEBUG nova.virt.hardware [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c4b78649a407b4d38a24ffa8256119ae',container_format='bare',created_at=2024-09-30T09:40:06Z,direct_url=,disk_format='vmdk',id=487d34b5-b85c-4d35-8b15-fd7347b3dcfd,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-914170666-shelved',owner='ba08f64c26d245a8b8f2b52ea97c2f1a',properties=ImageMetaProps,protected=,size=31666688,status='active',tags=,updated_at=2024-09-30T09:40:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 942.625892] env[63345]: DEBUG nova.virt.hardware [None 
req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 942.626108] env[63345]: DEBUG nova.virt.hardware [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 942.626311] env[63345]: DEBUG nova.virt.hardware [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 942.626466] env[63345]: DEBUG nova.virt.hardware [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 942.626621] env[63345]: DEBUG nova.virt.hardware [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 942.627137] env[63345]: DEBUG nova.virt.hardware [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 942.627226] env[63345]: DEBUG nova.virt.hardware [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 942.627415] env[63345]: DEBUG nova.virt.hardware [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 942.627596] env[63345]: DEBUG nova.virt.hardware [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 942.628182] env[63345]: DEBUG nova.virt.hardware [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 942.630091] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-982405e4-757d-4cc5-ac8b-29cb93664cc4 {{(pid=63345) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.641406] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5947382-f883-4e9b-9153-e9f73ee89107 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.661290] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9b:36:a9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7043ca7a-807c-4c7b-b646-23ffece188b2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '025d1e18-19a3-43ce-9db9-1590137a5544', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 942.671246] env[63345]: DEBUG oslo.service.loopingcall [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 942.671246] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 942.671943] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a9a0b37a-0c8e-4acd-9dc6-00608152fb61 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.696498] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 942.696498] env[63345]: value = "task-1017494" [ 942.696498] env[63345]: _type = "Task" [ 942.696498] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.709141] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017494, 'name': CreateVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.725125] env[63345]: DEBUG oslo_vmware.api [None req-80f120a0-d136-4dad-a2ea-97acb42a180b tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017490, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.738562] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b1aba700-aa3f-4db6-97fd-5ef1a47430df tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 942.874679] env[63345]: DEBUG oslo_vmware.api [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1017493, 'name': MoveVirtualDisk_Task} progress is 66%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.885099] env[63345]: DEBUG oslo_concurrency.lockutils [None req-783e1b39-4458-4385-8bb3-3cd8ebb8adbb tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Lock "a85688b0-d68f-4370-bd95-dc9fb1d2c26a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.564s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 943.092438] env[63345]: DEBUG oslo_vmware.api [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5293d543-37a8-1fcf-be13-32082f13f212, 'name': SearchDatastore_Task, 'duration_secs': 0.091194} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.092603] env[63345]: DEBUG oslo_concurrency.lockutils [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 943.092881] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 49cf9c08-4024-40aa-9370-7b4f8d89e2cf/49cf9c08-4024-40aa-9370-7b4f8d89e2cf.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 943.093171] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dfe4203e-e0b4-4dda-b5c2-4cf8445c79de {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.102660] env[63345]: DEBUG oslo_vmware.api [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Waiting for the task: (returnval){ [ 943.102660] env[63345]: value = "task-1017495" [ 943.102660] env[63345]: _type = "Task" [ 943.102660] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.122559] env[63345]: DEBUG oslo_vmware.api [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017495, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.123611] env[63345]: DEBUG nova.network.neutron [req-ec8724fa-c9ab-463e-9e4c-3493cf576506 req-71c93a74-e4ba-4fa8-b8c3-2d626657a0f2 service nova] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Updated VIF entry in instance network info cache for port 114e38e0-a558-4242-ad5b-4aac063dcb72. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 943.123971] env[63345]: DEBUG nova.network.neutron [req-ec8724fa-c9ab-463e-9e4c-3493cf576506 req-71c93a74-e4ba-4fa8-b8c3-2d626657a0f2 service nova] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Updating instance_info_cache with network_info: [{"id": "114e38e0-a558-4242-ad5b-4aac063dcb72", "address": "fa:16:3e:bb:2c:f8", "network": {"id": "dffa0b34-9323-42eb-aeb1-e32aebcb75c8", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1826417035-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.227", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "57e386920081487583ea143003aca8c4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "94e1d797-8eb2-4400-9f7d-f2eb60eb4cf2", "external-id": "nsx-vlan-transportzone-828", "segmentation_id": 828, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap114e38e0-a5", "ovs_interfaceid": "114e38e0-a558-4242-ad5b-4aac063dcb72", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 943.201134] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a81b966-64e8-4e6d-9ea1-f4c64cab78b8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.224585] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017494, 'name': CreateVM_Task} progress is 25%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.226039] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcaac7a0-7991-4e23-bf1a-3ffb70392e1e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.233980] env[63345]: DEBUG oslo_vmware.api [None req-80f120a0-d136-4dad-a2ea-97acb42a180b tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017490, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.279030] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9da6b0f-a99c-4c10-8f10-ec55942e4083 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.292585] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f74092b2-b944-4584-9f8c-e92886b0252f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.307171] env[63345]: DEBUG nova.compute.provider_tree [None req-8ded89f7-36e8-4b1f-bb8f-eb5b0d30d69a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 943.369828] env[63345]: DEBUG oslo_vmware.api [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1017493, 'name': MoveVirtualDisk_Task} progress is 85%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.469069] env[63345]: DEBUG nova.network.neutron [req-4a02cd79-9d02-4da0-98b9-1c2e88b77973 req-d705df61-0b7e-47d0-b2f4-003d90f58665 service nova] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Updated VIF entry in instance network info cache for port 025d1e18-19a3-43ce-9db9-1590137a5544. {{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 943.469493] env[63345]: DEBUG nova.network.neutron [req-4a02cd79-9d02-4da0-98b9-1c2e88b77973 req-d705df61-0b7e-47d0-b2f4-003d90f58665 service nova] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Updating instance_info_cache with network_info: [{"id": "025d1e18-19a3-43ce-9db9-1590137a5544", "address": "fa:16:3e:9b:36:a9", "network": {"id": "95d95c9b-b21c-4ee5-ab54-d0bf2699d38e", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-88421441-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba08f64c26d245a8b8f2b52ea97c2f1a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7043ca7a-807c-4c7b-b646-23ffece188b2", "external-id": "nsx-vlan-transportzone-619", "segmentation_id": 619, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap025d1e18-19", "ovs_interfaceid": "025d1e18-19a3-43ce-9db9-1590137a5544", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 943.618114] env[63345]: DEBUG oslo_vmware.api [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 
tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017495, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.630265] env[63345]: DEBUG oslo_concurrency.lockutils [req-ec8724fa-c9ab-463e-9e4c-3493cf576506 req-71c93a74-e4ba-4fa8-b8c3-2d626657a0f2 service nova] Releasing lock "refresh_cache-726332dd-8699-49a4-a9ea-b9cbfc159855" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 943.713341] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017494, 'name': CreateVM_Task, 'duration_secs': 0.77852} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.717135] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 943.718222] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/487d34b5-b85c-4d35-8b15-fd7347b3dcfd" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 943.718872] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Acquired lock "[datastore2] devstack-image-cache_base/487d34b5-b85c-4d35-8b15-fd7347b3dcfd" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 943.719370] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/487d34b5-b85c-4d35-8b15-fd7347b3dcfd" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 943.722523] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a422756e-02f9-44ab-a493-3e034a7a373e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.727112] env[63345]: DEBUG oslo_vmware.api [None req-80f120a0-d136-4dad-a2ea-97acb42a180b tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017490, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.730496] env[63345]: DEBUG oslo_vmware.api [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Waiting for the task: (returnval){ [ 943.730496] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52f1ffe4-0fc0-4de9-786f-5f44dc2d0768" [ 943.730496] env[63345]: _type = "Task" [ 943.730496] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.739377] env[63345]: DEBUG oslo_vmware.api [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52f1ffe4-0fc0-4de9-786f-5f44dc2d0768, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.778614] env[63345]: DEBUG oslo_concurrency.lockutils [None req-66582d54-53eb-4fcf-bbbe-9e58bb4cf0e8 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Acquiring lock "b5173471-3367-42ba-b450-62ad8573f048" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 943.778893] env[63345]: DEBUG oslo_concurrency.lockutils [None req-66582d54-53eb-4fcf-bbbe-9e58bb4cf0e8 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Lock "b5173471-3367-42ba-b450-62ad8573f048" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 943.779142] env[63345]: DEBUG oslo_concurrency.lockutils [None req-66582d54-53eb-4fcf-bbbe-9e58bb4cf0e8 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Acquiring lock "b5173471-3367-42ba-b450-62ad8573f048-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 943.779357] env[63345]: DEBUG oslo_concurrency.lockutils [None req-66582d54-53eb-4fcf-bbbe-9e58bb4cf0e8 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Lock "b5173471-3367-42ba-b450-62ad8573f048-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 943.779511] env[63345]: DEBUG oslo_concurrency.lockutils [None req-66582d54-53eb-4fcf-bbbe-9e58bb4cf0e8 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Lock "b5173471-3367-42ba-b450-62ad8573f048-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 943.781795] env[63345]: INFO nova.compute.manager [None req-66582d54-53eb-4fcf-bbbe-9e58bb4cf0e8 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: b5173471-3367-42ba-b450-62ad8573f048] Terminating instance [ 943.813957] env[63345]: DEBUG nova.scheduler.client.report [None req-8ded89f7-36e8-4b1f-bb8f-eb5b0d30d69a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': 
{'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 943.873203] env[63345]: DEBUG oslo_vmware.api [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1017493, 'name': MoveVirtualDisk_Task} progress is 100%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.972720] env[63345]: DEBUG oslo_concurrency.lockutils [req-4a02cd79-9d02-4da0-98b9-1c2e88b77973 req-d705df61-0b7e-47d0-b2f4-003d90f58665 service nova] Releasing lock "refresh_cache-9aa651b8-317d-4153-8c33-9df0a5d16115" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 944.117801] env[63345]: DEBUG oslo_vmware.api [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017495, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.223042] env[63345]: DEBUG oslo_vmware.api [None req-80f120a0-d136-4dad-a2ea-97acb42a180b tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017490, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.240647] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Releasing lock "[datastore2] devstack-image-cache_base/487d34b5-b85c-4d35-8b15-fd7347b3dcfd" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 944.240951] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Processing image 487d34b5-b85c-4d35-8b15-fd7347b3dcfd {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 944.241235] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/487d34b5-b85c-4d35-8b15-fd7347b3dcfd/487d34b5-b85c-4d35-8b15-fd7347b3dcfd.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 944.241390] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Acquired lock "[datastore2] devstack-image-cache_base/487d34b5-b85c-4d35-8b15-fd7347b3dcfd/487d34b5-b85c-4d35-8b15-fd7347b3dcfd.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 944.241595] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 944.241894] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8270e146-607b-4374-8b02-25c4ecd7d296 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.257201] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 944.257201] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 944.257829] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-83e9c110-7f52-441f-b57b-0d8beb862a59 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.263495] env[63345]: DEBUG oslo_vmware.api [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Waiting for the task: (returnval){ [ 944.263495] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]520c62d2-5f28-188e-64c4-cdcda30913c1" [ 944.263495] env[63345]: _type = "Task" [ 944.263495] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.271862] env[63345]: DEBUG oslo_vmware.api [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]520c62d2-5f28-188e-64c4-cdcda30913c1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.290580] env[63345]: DEBUG nova.compute.manager [None req-66582d54-53eb-4fcf-bbbe-9e58bb4cf0e8 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: b5173471-3367-42ba-b450-62ad8573f048] Start destroying the instance on the hypervisor. 
{{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 944.290580] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-66582d54-53eb-4fcf-bbbe-9e58bb4cf0e8 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: b5173471-3367-42ba-b450-62ad8573f048] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 944.291064] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29f0992a-7431-4a33-b524-70412ff4175e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.298779] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-66582d54-53eb-4fcf-bbbe-9e58bb4cf0e8 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: b5173471-3367-42ba-b450-62ad8573f048] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 944.299098] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3e15a785-9d55-410b-8e50-6082039a2d29 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.306404] env[63345]: DEBUG oslo_vmware.api [None req-66582d54-53eb-4fcf-bbbe-9e58bb4cf0e8 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Waiting for the task: (returnval){ [ 944.306404] env[63345]: value = "task-1017496" [ 944.306404] env[63345]: _type = "Task" [ 944.306404] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.315027] env[63345]: DEBUG oslo_vmware.api [None req-66582d54-53eb-4fcf-bbbe-9e58bb4cf0e8 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017496, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.324072] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8ded89f7-36e8-4b1f-bb8f-eb5b0d30d69a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.986s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 944.327044] env[63345]: DEBUG oslo_concurrency.lockutils [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 11.126s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 944.345299] env[63345]: INFO nova.scheduler.client.report [None req-8ded89f7-36e8-4b1f-bb8f-eb5b0d30d69a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Deleted allocations for instance 017a06b3-cc1a-4822-a07f-ca881fd4254b [ 944.370743] env[63345]: DEBUG oslo_vmware.api [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1017493, 'name': MoveVirtualDisk_Task, 'duration_secs': 3.108084} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.371097] env[63345]: INFO nova.virt.vmwareapi.ds_util [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_9b96f675-5113-4f67-bbeb-c8c60557a61d/OSTACK_IMG_9b96f675-5113-4f67-bbeb-c8c60557a61d.vmdk to [datastore2] devstack-image-cache_base/163b8ba5-dbfa-4890-b990-7e227e0ccf91/163b8ba5-dbfa-4890-b990-7e227e0ccf91.vmdk. 
[ 944.371321] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Cleaning up location [datastore2] OSTACK_IMG_9b96f675-5113-4f67-bbeb-c8c60557a61d {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 944.371518] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_9b96f675-5113-4f67-bbeb-c8c60557a61d {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 944.372192] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fb0ad5eb-e826-465c-bcca-a80065f2e8b0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.379781] env[63345]: DEBUG oslo_vmware.api [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Waiting for the task: (returnval){ [ 944.379781] env[63345]: value = "task-1017497" [ 944.379781] env[63345]: _type = "Task" [ 944.379781] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.390832] env[63345]: DEBUG oslo_vmware.api [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1017497, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.481080] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Acquiring lock "b3f20003-f75d-4d9f-bb4a-02d2930054a8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 944.481505] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Lock "b3f20003-f75d-4d9f-bb4a-02d2930054a8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 944.484595] env[63345]: DEBUG nova.compute.manager [req-f0e37942-c588-49fc-b69d-f85736da454f req-c975683f-f863-4820-ae95-74ad11bbb4e1 service nova] [instance: a0eb9dae-0d27-419f-9210-eaa445e564c8] Received event network-vif-plugged-bbb77ecb-8d67-4f41-9505-2571225c8480 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 944.484893] env[63345]: DEBUG oslo_concurrency.lockutils [req-f0e37942-c588-49fc-b69d-f85736da454f req-c975683f-f863-4820-ae95-74ad11bbb4e1 service nova] Acquiring lock "a0eb9dae-0d27-419f-9210-eaa445e564c8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 944.485169] env[63345]: DEBUG oslo_concurrency.lockutils [req-f0e37942-c588-49fc-b69d-f85736da454f req-c975683f-f863-4820-ae95-74ad11bbb4e1 service nova] Lock "a0eb9dae-0d27-419f-9210-eaa445e564c8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 944.489236] env[63345]: DEBUG oslo_concurrency.lockutils [req-f0e37942-c588-49fc-b69d-f85736da454f req-c975683f-f863-4820-ae95-74ad11bbb4e1 service nova] Lock "a0eb9dae-0d27-419f-9210-eaa445e564c8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 944.489445] env[63345]: DEBUG nova.compute.manager [req-f0e37942-c588-49fc-b69d-f85736da454f req-c975683f-f863-4820-ae95-74ad11bbb4e1 service nova] [instance: a0eb9dae-0d27-419f-9210-eaa445e564c8] No waiting events found dispatching network-vif-plugged-bbb77ecb-8d67-4f41-9505-2571225c8480 {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 944.489634] env[63345]: WARNING nova.compute.manager [req-f0e37942-c588-49fc-b69d-f85736da454f req-c975683f-f863-4820-ae95-74ad11bbb4e1 service nova] [instance: a0eb9dae-0d27-419f-9210-eaa445e564c8] Received unexpected event network-vif-plugged-bbb77ecb-8d67-4f41-9505-2571225c8480 for instance with vm_state building and task_state spawning. 
[ 944.597440] env[63345]: DEBUG nova.network.neutron [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: a0eb9dae-0d27-419f-9210-eaa445e564c8] Successfully updated port: bbb77ecb-8d67-4f41-9505-2571225c8480 {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 944.616550] env[63345]: DEBUG oslo_vmware.api [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017495, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.337173} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.616832] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 49cf9c08-4024-40aa-9370-7b4f8d89e2cf/49cf9c08-4024-40aa-9370-7b4f8d89e2cf.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 944.617082] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 49cf9c08-4024-40aa-9370-7b4f8d89e2cf] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 944.617318] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bf440a07-da54-4472-b6e0-a44ddc927c95 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.623816] env[63345]: DEBUG oslo_vmware.api [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Waiting for the task: (returnval){ [ 944.623816] env[63345]: value = "task-1017498" [ 944.623816] env[63345]: _type = "Task" [ 944.623816] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.631189] env[63345]: DEBUG oslo_vmware.api [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017498, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.723599] env[63345]: DEBUG oslo_vmware.api [None req-80f120a0-d136-4dad-a2ea-97acb42a180b tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017490, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.773418] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Preparing fetch location {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 944.773700] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Fetch image to [datastore2] OSTACK_IMG_6b9edf2c-e274-4057-9764-29012b7e5e4c/OSTACK_IMG_6b9edf2c-e274-4057-9764-29012b7e5e4c.vmdk {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 944.773891] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Downloading stream optimized image 487d34b5-b85c-4d35-8b15-fd7347b3dcfd to [datastore2] OSTACK_IMG_6b9edf2c-e274-4057-9764-29012b7e5e4c/OSTACK_IMG_6b9edf2c-e274-4057-9764-29012b7e5e4c.vmdk on the data store datastore2 as vApp {{(pid=63345) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 944.774100] env[63345]: DEBUG nova.virt.vmwareapi.images [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Downloading image file data 487d34b5-b85c-4d35-8b15-fd7347b3dcfd to the ESX as VM named 'OSTACK_IMG_6b9edf2c-e274-4057-9764-29012b7e5e4c' {{(pid=63345) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 944.826710] env[63345]: DEBUG oslo_vmware.api [None req-66582d54-53eb-4fcf-bbbe-9e58bb4cf0e8 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017496, 'name': PowerOffVM_Task, 'duration_secs': 0.266909} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.827028] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-66582d54-53eb-4fcf-bbbe-9e58bb4cf0e8 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: b5173471-3367-42ba-b450-62ad8573f048] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 944.827213] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-66582d54-53eb-4fcf-bbbe-9e58bb4cf0e8 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: b5173471-3367-42ba-b450-62ad8573f048] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 944.827975] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a72ee1e5-f4ee-480d-9cb6-17cb4c7a3e7d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.832705] env[63345]: INFO nova.compute.claims [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 944.854928] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8ded89f7-36e8-4b1f-bb8f-eb5b0d30d69a tempest-FloatingIPsAssociationNegativeTestJSON-527930040 tempest-FloatingIPsAssociationNegativeTestJSON-527930040-project-member] Lock "017a06b3-cc1a-4822-a07f-ca881fd4254b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.014s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 944.867656] env[63345]: DEBUG oslo_vmware.rw_handles [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 944.867656] env[63345]: value = "resgroup-9" [ 944.867656] env[63345]: _type = "ResourcePool" [ 944.867656] env[63345]: }. {{(pid=63345) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 944.867939] env[63345]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-b8f2fc64-992a-4106-aa85-d9bb7c683124 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.896501] env[63345]: DEBUG oslo_vmware.api [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1017497, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.099808} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.896784] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 944.896963] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Releasing lock "[datastore2] devstack-image-cache_base/163b8ba5-dbfa-4890-b990-7e227e0ccf91/163b8ba5-dbfa-4890-b990-7e227e0ccf91.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 944.897233] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/163b8ba5-dbfa-4890-b990-7e227e0ccf91/163b8ba5-dbfa-4890-b990-7e227e0ccf91.vmdk to [datastore2] 070a834d-6478-4705-8df0-2a27c8780507/070a834d-6478-4705-8df0-2a27c8780507.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 944.898087] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0f53c74d-170b-49f8-b0ad-c6c523bd1881 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.901586] env[63345]: DEBUG oslo_vmware.rw_handles [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Lease: (returnval){ [ 944.901586] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]529ac902-125e-f215-d59c-2a6c975ca948" [ 944.901586] env[63345]: _type = "HttpNfcLease" [ 944.901586] env[63345]: } obtained for vApp import into resource pool (val){ [ 944.901586] env[63345]: value = "resgroup-9" [ 944.901586] env[63345]: _type = "ResourcePool" [ 944.901586] env[63345]: }. {{(pid=63345) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 944.901846] env[63345]: DEBUG oslo_vmware.api [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Waiting for the lease: (returnval){ [ 944.901846] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]529ac902-125e-f215-d59c-2a6c975ca948" [ 944.901846] env[63345]: _type = "HttpNfcLease" [ 944.901846] env[63345]: } to be ready. 
{{(pid=63345) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 944.902099] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-66582d54-53eb-4fcf-bbbe-9e58bb4cf0e8 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: b5173471-3367-42ba-b450-62ad8573f048] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 944.902249] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-66582d54-53eb-4fcf-bbbe-9e58bb4cf0e8 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: b5173471-3367-42ba-b450-62ad8573f048] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 944.902424] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-66582d54-53eb-4fcf-bbbe-9e58bb4cf0e8 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Deleting the datastore file [datastore2] b5173471-3367-42ba-b450-62ad8573f048 {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 944.903020] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d9714892-2591-44c0-b305-f57f4433edb9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.909996] env[63345]: DEBUG oslo_vmware.api [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Waiting for the task: (returnval){ [ 944.909996] env[63345]: value = "task-1017501" [ 944.909996] env[63345]: _type = "Task" [ 944.909996] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.914801] env[63345]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 944.914801] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]529ac902-125e-f215-d59c-2a6c975ca948" [ 944.914801] env[63345]: _type = "HttpNfcLease" [ 944.914801] env[63345]: } is initializing. {{(pid=63345) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 944.915328] env[63345]: DEBUG oslo_vmware.api [None req-66582d54-53eb-4fcf-bbbe-9e58bb4cf0e8 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Waiting for the task: (returnval){ [ 944.915328] env[63345]: value = "task-1017502" [ 944.915328] env[63345]: _type = "Task" [ 944.915328] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.921712] env[63345]: DEBUG oslo_vmware.api [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1017501, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.926522] env[63345]: DEBUG oslo_vmware.api [None req-66582d54-53eb-4fcf-bbbe-9e58bb4cf0e8 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017502, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.990702] env[63345]: DEBUG nova.compute.manager [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 945.099937] env[63345]: DEBUG oslo_concurrency.lockutils [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Acquiring lock "refresh_cache-a0eb9dae-0d27-419f-9210-eaa445e564c8" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 945.100317] env[63345]: DEBUG oslo_concurrency.lockutils [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Acquired lock "refresh_cache-a0eb9dae-0d27-419f-9210-eaa445e564c8" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 945.100485] env[63345]: DEBUG nova.network.neutron [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: a0eb9dae-0d27-419f-9210-eaa445e564c8] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 945.134034] env[63345]: DEBUG oslo_vmware.api [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017498, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069006} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.134249] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 49cf9c08-4024-40aa-9370-7b4f8d89e2cf] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 945.135575] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-754bbc65-84dc-4160-937f-90c19757e042 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.157348] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 49cf9c08-4024-40aa-9370-7b4f8d89e2cf] Reconfiguring VM instance instance-0000005a to attach disk [datastore2] 49cf9c08-4024-40aa-9370-7b4f8d89e2cf/49cf9c08-4024-40aa-9370-7b4f8d89e2cf.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 945.157930] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fd68effd-dc1f-4b67-b806-f88373b38148 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.177766] env[63345]: DEBUG oslo_vmware.api [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Waiting for the task: (returnval){ [ 945.177766] env[63345]: value = "task-1017503" [ 945.177766] env[63345]: _type = "Task" [ 945.177766] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.185879] env[63345]: DEBUG oslo_vmware.api [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017503, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.223438] env[63345]: DEBUG oslo_vmware.api [None req-80f120a0-d136-4dad-a2ea-97acb42a180b tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017490, 'name': ReconfigVM_Task} progress is 18%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.341275] env[63345]: INFO nova.compute.resource_tracker [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Updating resource usage from migration 065e3d6c-c475-45d4-ae3e-8a6d8de03d6b [ 945.412763] env[63345]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 945.412763] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]529ac902-125e-f215-d59c-2a6c975ca948" [ 945.412763] env[63345]: _type = "HttpNfcLease" [ 945.412763] env[63345]: } is initializing. 
{{(pid=63345) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 945.429284] env[63345]: DEBUG oslo_vmware.api [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1017501, 'name': CopyVirtualDisk_Task} progress is 9%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.432726] env[63345]: DEBUG oslo_vmware.api [None req-66582d54-53eb-4fcf-bbbe-9e58bb4cf0e8 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017502, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.458571} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.435738] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-66582d54-53eb-4fcf-bbbe-9e58bb4cf0e8 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 945.436011] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-66582d54-53eb-4fcf-bbbe-9e58bb4cf0e8 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: b5173471-3367-42ba-b450-62ad8573f048] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 945.436228] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-66582d54-53eb-4fcf-bbbe-9e58bb4cf0e8 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: b5173471-3367-42ba-b450-62ad8573f048] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 945.436426] env[63345]: INFO nova.compute.manager [None req-66582d54-53eb-4fcf-bbbe-9e58bb4cf0e8 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: b5173471-3367-42ba-b450-62ad8573f048] Took 1.15 seconds to destroy the instance on the hypervisor. [ 945.436690] env[63345]: DEBUG oslo.service.loopingcall [None req-66582d54-53eb-4fcf-bbbe-9e58bb4cf0e8 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 945.437194] env[63345]: DEBUG nova.compute.manager [-] [instance: b5173471-3367-42ba-b450-62ad8573f048] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 945.437303] env[63345]: DEBUG nova.network.neutron [-] [instance: b5173471-3367-42ba-b450-62ad8573f048] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 945.512912] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 945.638313] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3f8e415-69bb-4f63-83b1-94f2d5541ad0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.648139] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22160257-ae1c-47ab-85b7-710d0624a4c1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.690429] env[63345]: DEBUG nova.network.neutron [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: a0eb9dae-0d27-419f-9210-eaa445e564c8] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 945.696277] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6a9b57c-f458-4ce4-a858-38867fe4b30f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.708434] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb719938-936f-49cc-9fca-cc1f56f2723e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.712764] env[63345]: DEBUG oslo_vmware.api [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017503, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.729645] env[63345]: DEBUG nova.compute.provider_tree [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 945.736591] env[63345]: DEBUG oslo_vmware.api [None req-80f120a0-d136-4dad-a2ea-97acb42a180b tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017490, 'name': ReconfigVM_Task, 'duration_secs': 5.813624} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.736766] env[63345]: DEBUG oslo_concurrency.lockutils [None req-80f120a0-d136-4dad-a2ea-97acb42a180b tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Releasing lock "0da64b45-fa00-4fe8-8d1d-df586f27743f" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 945.736883] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-80f120a0-d136-4dad-a2ea-97acb42a180b tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Reconfigured VM to detach interface {{(pid=63345) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1984}} [ 945.912596] env[63345]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 945.912596] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]529ac902-125e-f215-d59c-2a6c975ca948" [ 945.912596] env[63345]: _type = "HttpNfcLease" [ 945.912596] env[63345]: } is initializing. {{(pid=63345) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 945.923054] env[63345]: DEBUG oslo_vmware.api [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1017501, 'name': CopyVirtualDisk_Task} progress is 26%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.924095] env[63345]: DEBUG nova.network.neutron [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: a0eb9dae-0d27-419f-9210-eaa445e564c8] Updating instance_info_cache with network_info: [{"id": "bbb77ecb-8d67-4f41-9505-2571225c8480", "address": "fa:16:3e:55:21:35", "network": {"id": "372a3368-2d7a-4380-b811-7ad477d85250", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-454648225-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "41afa63287424a549133615eb390bac7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b8af79a-31d5-4d78-93d7-3919aa1d9186", "external-id": "nsx-vlan-transportzone-324", "segmentation_id": 324, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbbb77ecb-8d", "ovs_interfaceid": "bbb77ecb-8d67-4f41-9505-2571225c8480", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 946.200668] env[63345]: DEBUG oslo_vmware.api [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017503, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.235356] env[63345]: DEBUG nova.scheduler.client.report [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 946.260328] env[63345]: DEBUG nova.compute.manager [req-a49550b7-be63-4c11-bb49-09f87712a75a req-cca816e3-87b5-43e3-b159-84504b1d4104 service nova] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Received event network-vif-deleted-2b931f56-815d-48ec-915d-c68e2ae0333f {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 946.260610] env[63345]: INFO nova.compute.manager [req-a49550b7-be63-4c11-bb49-09f87712a75a req-cca816e3-87b5-43e3-b159-84504b1d4104 service nova] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Neutron deleted interface 2b931f56-815d-48ec-915d-c68e2ae0333f; detaching it from the instance and deleting it from the info cache [ 946.261256] env[63345]: DEBUG nova.network.neutron [req-a49550b7-be63-4c11-bb49-09f87712a75a req-cca816e3-87b5-43e3-b159-84504b1d4104 service nova] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Updating instance_info_cache with network_info: [{"id": "9e054cb2-eb47-4dd3-8ec7-d8205d577337", "address": "fa:16:3e:80:1f:9f", "network": {"id": "b360ab0d-3deb-4632-a8d5-c1639db9e9e2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2015660260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.226", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33c28bfca4da460e8ca96dc7519204c8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f35e69ef-c2c8-4b8c-9887-33e97b242c0a", "external-id": "nsx-vlan-transportzone-969", "segmentation_id": 969, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e054cb2-eb", "ovs_interfaceid": "9e054cb2-eb47-4dd3-8ec7-d8205d577337", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "0ca27a73-4f2c-47db-b68f-966110d6d772", "address": "fa:16:3e:4d:52:45", "network": {"id": "b360ab0d-3deb-4632-a8d5-c1639db9e9e2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2015660260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": 
{"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33c28bfca4da460e8ca96dc7519204c8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f35e69ef-c2c8-4b8c-9887-33e97b242c0a", "external-id": "nsx-vlan-transportzone-969", "segmentation_id": 969, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0ca27a73-4f", "ovs_interfaceid": "0ca27a73-4f2c-47db-b68f-966110d6d772", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 946.341857] env[63345]: DEBUG nova.network.neutron [-] [instance: b5173471-3367-42ba-b450-62ad8573f048] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 946.415435] env[63345]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 946.415435] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]529ac902-125e-f215-d59c-2a6c975ca948" [ 946.415435] env[63345]: _type = "HttpNfcLease" [ 946.415435] env[63345]: } is initializing. {{(pid=63345) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 946.425601] env[63345]: DEBUG oslo_vmware.api [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1017501, 'name': CopyVirtualDisk_Task} progress is 49%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.426202] env[63345]: DEBUG oslo_concurrency.lockutils [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Releasing lock "refresh_cache-a0eb9dae-0d27-419f-9210-eaa445e564c8" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 946.426526] env[63345]: DEBUG nova.compute.manager [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: a0eb9dae-0d27-419f-9210-eaa445e564c8] Instance network_info: |[{"id": "bbb77ecb-8d67-4f41-9505-2571225c8480", "address": "fa:16:3e:55:21:35", "network": {"id": "372a3368-2d7a-4380-b811-7ad477d85250", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-454648225-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "41afa63287424a549133615eb390bac7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b8af79a-31d5-4d78-93d7-3919aa1d9186", "external-id": "nsx-vlan-transportzone-324", "segmentation_id": 324, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbbb77ecb-8d", "ovs_interfaceid": "bbb77ecb-8d67-4f41-9505-2571225c8480", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": 
{}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 946.426953] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: a0eb9dae-0d27-419f-9210-eaa445e564c8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:55:21:35', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5b8af79a-31d5-4d78-93d7-3919aa1d9186', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bbb77ecb-8d67-4f41-9505-2571225c8480', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 946.435162] env[63345]: DEBUG oslo.service.loopingcall [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 946.435470] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a0eb9dae-0d27-419f-9210-eaa445e564c8] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 946.436131] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f61b08d3-eb0a-4ace-9574-318c34e13f82 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.456525] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 946.456525] env[63345]: value = "task-1017504" [ 946.456525] env[63345]: _type = "Task" [ 946.456525] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.467062] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017504, 'name': CreateVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.571012] env[63345]: DEBUG nova.compute.manager [req-66c2247e-e042-434e-bacb-cfd4b4205883 req-b9e76429-9db0-4621-bf5e-0bde8793d141 service nova] [instance: a0eb9dae-0d27-419f-9210-eaa445e564c8] Received event network-changed-bbb77ecb-8d67-4f41-9505-2571225c8480 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 946.571252] env[63345]: DEBUG nova.compute.manager [req-66c2247e-e042-434e-bacb-cfd4b4205883 req-b9e76429-9db0-4621-bf5e-0bde8793d141 service nova] [instance: a0eb9dae-0d27-419f-9210-eaa445e564c8] Refreshing instance network info cache due to event network-changed-bbb77ecb-8d67-4f41-9505-2571225c8480. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 946.571484] env[63345]: DEBUG oslo_concurrency.lockutils [req-66c2247e-e042-434e-bacb-cfd4b4205883 req-b9e76429-9db0-4621-bf5e-0bde8793d141 service nova] Acquiring lock "refresh_cache-a0eb9dae-0d27-419f-9210-eaa445e564c8" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 946.571633] env[63345]: DEBUG oslo_concurrency.lockutils [req-66c2247e-e042-434e-bacb-cfd4b4205883 req-b9e76429-9db0-4621-bf5e-0bde8793d141 service nova] Acquired lock "refresh_cache-a0eb9dae-0d27-419f-9210-eaa445e564c8" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 946.571844] env[63345]: DEBUG nova.network.neutron [req-66c2247e-e042-434e-bacb-cfd4b4205883 req-b9e76429-9db0-4621-bf5e-0bde8793d141 service nova] [instance: a0eb9dae-0d27-419f-9210-eaa445e564c8] Refreshing network info cache for port bbb77ecb-8d67-4f41-9505-2571225c8480 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 946.701484] env[63345]: DEBUG oslo_vmware.api [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017503, 'name': ReconfigVM_Task, 'duration_secs': 1.519196} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.702243] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 49cf9c08-4024-40aa-9370-7b4f8d89e2cf] Reconfigured VM instance instance-0000005a to attach disk [datastore2] 49cf9c08-4024-40aa-9370-7b4f8d89e2cf/49cf9c08-4024-40aa-9370-7b4f8d89e2cf.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 946.703034] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-413a10b3-9111-4e00-a62c-a05fe173198b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.713286] env[63345]: DEBUG oslo_vmware.api [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Waiting for the task: (returnval){ [ 946.713286] env[63345]: value = "task-1017505" [ 946.713286] env[63345]: _type = "Task" [ 946.713286] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.724187] env[63345]: DEBUG oslo_vmware.api [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017505, 'name': Rename_Task} progress is 5%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.742383] env[63345]: DEBUG oslo_concurrency.lockutils [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.415s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 946.742609] env[63345]: INFO nova.compute.manager [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Migrating [ 946.754861] env[63345]: DEBUG oslo_concurrency.lockutils [None req-3fa755dc-bfd1-4cae-9cf9-c0d907f64b33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.385s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 946.755223] env[63345]: DEBUG nova.objects.instance [None req-3fa755dc-bfd1-4cae-9cf9-c0d907f64b33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Lazy-loading 'resources' on Instance uuid dde93fd5-6312-4d91-b041-b7fc84b207d3 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 946.765217] env[63345]: DEBUG oslo_concurrency.lockutils [req-a49550b7-be63-4c11-bb49-09f87712a75a req-cca816e3-87b5-43e3-b159-84504b1d4104 service nova] Acquiring lock "0da64b45-fa00-4fe8-8d1d-df586f27743f" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 946.765429] env[63345]: DEBUG oslo_concurrency.lockutils [req-a49550b7-be63-4c11-bb49-09f87712a75a req-cca816e3-87b5-43e3-b159-84504b1d4104 service nova] Acquired lock "0da64b45-fa00-4fe8-8d1d-df586f27743f" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 946.766966] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1d157b0-33a4-49a4-804f-879d45ba7b35 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.791577] env[63345]: DEBUG oslo_concurrency.lockutils [req-a49550b7-be63-4c11-bb49-09f87712a75a req-cca816e3-87b5-43e3-b159-84504b1d4104 service nova] Releasing lock "0da64b45-fa00-4fe8-8d1d-df586f27743f" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 946.792030] env[63345]: WARNING nova.compute.manager [req-a49550b7-be63-4c11-bb49-09f87712a75a req-cca816e3-87b5-43e3-b159-84504b1d4104 service nova] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Detach interface failed, port_id=2b931f56-815d-48ec-915d-c68e2ae0333f, reason: No device with interface-id 2b931f56-815d-48ec-915d-c68e2ae0333f exists on VM: nova.exception.NotFound: No device with interface-id 2b931f56-815d-48ec-915d-c68e2ae0333f exists on VM [ 946.844537] env[63345]: INFO nova.compute.manager [-] [instance: b5173471-3367-42ba-b450-62ad8573f048] Took 1.41 seconds to deallocate network for instance. 
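The entries above and below repeatedly poll vCenter tasks (ReconfigVM_Task, CopyVirtualDisk_Task, CreateVM_Task, Rename_Task) and log their progress until they report completion. A minimal sketch of that polling pattern follows; it is not the oslo.vmware implementation, and FakeTask/FakeTaskInfo are stand-ins invented here purely so the loop can run on its own.

import itertools
import logging
import time

logging.basicConfig(level=logging.DEBUG, format="%(message)s")
LOG = logging.getLogger("poll_sketch")


class FakeTaskInfo:
    """Stand-in for the TaskInfo object a vCenter-style API would return."""
    def __init__(self, key, state, progress=None, result=None, error=None):
        self.key, self.state = key, state
        self.progress, self.result, self.error = progress, result, error


class FakeTask:
    """Fake task that reports increasing progress, then success."""
    def __init__(self, key):
        self._key = key
        self._progress = itertools.chain([14, 49, 88], itertools.repeat(100))

    def get_task_info(self):
        pct = next(self._progress)
        if pct < 100:
            return FakeTaskInfo(self._key, "running", progress=pct)
        return FakeTaskInfo(self._key, "success", result="ok")


def wait_for_task(task, interval=0.5):
    """Poll until the task leaves 'running', mirroring the progress lines above."""
    while True:
        info = task.get_task_info()
        if info.state == "running":
            LOG.debug("Task: %s progress is %d%%.", info.key, info.progress)
        elif info.state == "success":
            LOG.debug("Task: %s completed successfully.", info.key)
            return info.result
        else:
            raise RuntimeError(f"Task {info.key} failed: {info.error}")
        time.sleep(interval)


if __name__ == "__main__":
    wait_for_task(FakeTask("task-1017503"), interval=0.01)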
[ 946.922335] env[63345]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 946.922335] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]529ac902-125e-f215-d59c-2a6c975ca948" [ 946.922335] env[63345]: _type = "HttpNfcLease" [ 946.922335] env[63345]: } is initializing. {{(pid=63345) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 946.931533] env[63345]: DEBUG oslo_vmware.api [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1017501, 'name': CopyVirtualDisk_Task} progress is 69%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.971598] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017504, 'name': CreateVM_Task} progress is 25%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.230223] env[63345]: DEBUG oslo_concurrency.lockutils [None req-80f120a0-d136-4dad-a2ea-97acb42a180b tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquiring lock "refresh_cache-0da64b45-fa00-4fe8-8d1d-df586f27743f" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 947.230434] env[63345]: DEBUG oslo_concurrency.lockutils [None req-80f120a0-d136-4dad-a2ea-97acb42a180b tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquired lock "refresh_cache-0da64b45-fa00-4fe8-8d1d-df586f27743f" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 947.230670] env[63345]: DEBUG nova.network.neutron [None req-80f120a0-d136-4dad-a2ea-97acb42a180b tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 947.232848] env[63345]: DEBUG oslo_vmware.api [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017505, 'name': Rename_Task} progress is 14%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.263633] env[63345]: DEBUG oslo_concurrency.lockutils [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquiring lock "refresh_cache-0fe61754-458c-4c5c-bb2d-2677302e5fb9" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 947.263898] env[63345]: DEBUG oslo_concurrency.lockutils [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquired lock "refresh_cache-0fe61754-458c-4c5c-bb2d-2677302e5fb9" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 947.264020] env[63345]: DEBUG nova.network.neutron [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 947.356849] env[63345]: DEBUG oslo_concurrency.lockutils [None req-66582d54-53eb-4fcf-bbbe-9e58bb4cf0e8 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 947.429642] env[63345]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 947.429642] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]529ac902-125e-f215-d59c-2a6c975ca948" [ 947.429642] env[63345]: _type = "HttpNfcLease" [ 947.429642] env[63345]: } is initializing. {{(pid=63345) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 947.437116] env[63345]: DEBUG oslo_vmware.api [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1017501, 'name': CopyVirtualDisk_Task} progress is 88%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.473588] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017504, 'name': CreateVM_Task} progress is 25%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.572145] env[63345]: DEBUG nova.network.neutron [req-66c2247e-e042-434e-bacb-cfd4b4205883 req-b9e76429-9db0-4621-bf5e-0bde8793d141 service nova] [instance: a0eb9dae-0d27-419f-9210-eaa445e564c8] Updated VIF entry in instance network info cache for port bbb77ecb-8d67-4f41-9505-2571225c8480. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 947.572551] env[63345]: DEBUG nova.network.neutron [req-66c2247e-e042-434e-bacb-cfd4b4205883 req-b9e76429-9db0-4621-bf5e-0bde8793d141 service nova] [instance: a0eb9dae-0d27-419f-9210-eaa445e564c8] Updating instance_info_cache with network_info: [{"id": "bbb77ecb-8d67-4f41-9505-2571225c8480", "address": "fa:16:3e:55:21:35", "network": {"id": "372a3368-2d7a-4380-b811-7ad477d85250", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-454648225-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "41afa63287424a549133615eb390bac7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b8af79a-31d5-4d78-93d7-3919aa1d9186", "external-id": "nsx-vlan-transportzone-324", "segmentation_id": 324, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbbb77ecb-8d", "ovs_interfaceid": "bbb77ecb-8d67-4f41-9505-2571225c8480", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 947.591517] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02398482-b249-4418-a22a-a0bdaa47b9df {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.603760] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c61fb88f-1e12-4dc8-979b-1e1ee1ab5407 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.642178] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d3b9ea8-dbc3-4bb8-acd9-28c2f9845804 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.651464] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c7ec800-d803-4563-929c-715eaa8d7025 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.668512] env[63345]: DEBUG nova.compute.provider_tree [None req-3fa755dc-bfd1-4cae-9cf9-c0d907f64b33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 947.728032] env[63345]: DEBUG oslo_vmware.api [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017505, 'name': Rename_Task} progress is 99%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.928430] env[63345]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 947.928430] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]529ac902-125e-f215-d59c-2a6c975ca948" [ 947.928430] env[63345]: _type = "HttpNfcLease" [ 947.928430] env[63345]: } is ready. {{(pid=63345) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 947.929160] env[63345]: DEBUG oslo_vmware.rw_handles [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 947.929160] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]529ac902-125e-f215-d59c-2a6c975ca948" [ 947.929160] env[63345]: _type = "HttpNfcLease" [ 947.929160] env[63345]: }. {{(pid=63345) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 947.929931] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6397f2e-64d4-44d2-ba07-4d3aead20d75 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.937116] env[63345]: DEBUG oslo_vmware.api [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1017501, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.798608} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.937845] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/163b8ba5-dbfa-4890-b990-7e227e0ccf91/163b8ba5-dbfa-4890-b990-7e227e0ccf91.vmdk to [datastore2] 070a834d-6478-4705-8df0-2a27c8780507/070a834d-6478-4705-8df0-2a27c8780507.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 947.938963] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d5efaf9-90bf-4994-a75e-c71ed0e6f925 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.944899] env[63345]: DEBUG oslo_vmware.rw_handles [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5232c0ab-16cb-bc96-20ad-4ea35537442f/disk-0.vmdk from lease info. {{(pid=63345) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 947.945101] env[63345]: DEBUG oslo_vmware.rw_handles [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Creating HTTP connection to write to file with size = 31666688 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5232c0ab-16cb-bc96-20ad-4ea35537442f/disk-0.vmdk. 
{{(pid=63345) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 948.028918] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Reconfiguring VM instance instance-00000020 to attach disk [datastore2] 070a834d-6478-4705-8df0-2a27c8780507/070a834d-6478-4705-8df0-2a27c8780507.vmdk or device None with type streamOptimized {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 948.031323] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-538f2663-8e11-4c0c-9850-d552551cf0d3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.050711] env[63345]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-cec8d5e3-3397-4f67-9515-bd7de11846fa {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.055736] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017504, 'name': CreateVM_Task} progress is 99%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.057627] env[63345]: DEBUG oslo_vmware.api [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Waiting for the task: (returnval){ [ 948.057627] env[63345]: value = "task-1017506" [ 948.057627] env[63345]: _type = "Task" [ 948.057627] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.067368] env[63345]: DEBUG oslo_vmware.api [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1017506, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.078720] env[63345]: DEBUG oslo_concurrency.lockutils [req-66c2247e-e042-434e-bacb-cfd4b4205883 req-b9e76429-9db0-4621-bf5e-0bde8793d141 service nova] Releasing lock "refresh_cache-a0eb9dae-0d27-419f-9210-eaa445e564c8" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 948.079211] env[63345]: DEBUG nova.compute.manager [req-66c2247e-e042-434e-bacb-cfd4b4205883 req-b9e76429-9db0-4621-bf5e-0bde8793d141 service nova] [instance: b5173471-3367-42ba-b450-62ad8573f048] Received event network-vif-deleted-9dc2d1aa-5968-48ba-9b48-23f87e1e9419 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 948.172106] env[63345]: DEBUG nova.scheduler.client.report [None req-3fa755dc-bfd1-4cae-9cf9-c0d907f64b33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 948.222683] env[63345]: INFO nova.network.neutron [None req-80f120a0-d136-4dad-a2ea-97acb42a180b tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Port 0ca27a73-4f2c-47db-b68f-966110d6d772 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
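The INFO entry above and the shrunken instance_info_cache that follows it show a stale port (0ca27a73-…) being dropped from the cached network_info because Neutron no longer associates it with the instance. A small sketch of that pruning step, under simplified assumptions (the function name and trimmed dict shapes are illustrative only, not Nova's actual code):

from __future__ import annotations


def prune_network_info(cached_network_info: list[dict],
                       current_port_ids: set[str]) -> list[dict]:
    """Return the cached VIF list restricted to ports Neutron still reports."""
    removed = [vif["id"] for vif in cached_network_info
               if vif["id"] not in current_port_ids]
    for port_id in removed:
        # Mirrors the log message emitted when a cached port disappears.
        print(f"Port {port_id} from network info_cache is no longer "
              f"associated with instance in Neutron. Removing from "
              f"network info_cache.")
    return [vif for vif in cached_network_info if vif["id"] in current_port_ids]


if __name__ == "__main__":
    # Shapes taken from the instance_info_cache entries in the log,
    # trimmed to the fields this sketch needs.
    cache = [
        {"id": "9e054cb2-eb47-4dd3-8ec7-d8205d577337", "address": "fa:16:3e:80:1f:9f"},
        {"id": "0ca27a73-4f2c-47db-b68f-966110d6d772", "address": "fa:16:3e:4d:52:45"},
    ]
    # Neutron now reports only the first port for this instance.
    live_ports = {"9e054cb2-eb47-4dd3-8ec7-d8205d577337"}
    print(prune_network_info(cache, live_ports))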
[ 948.223154] env[63345]: DEBUG nova.network.neutron [None req-80f120a0-d136-4dad-a2ea-97acb42a180b tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Updating instance_info_cache with network_info: [{"id": "9e054cb2-eb47-4dd3-8ec7-d8205d577337", "address": "fa:16:3e:80:1f:9f", "network": {"id": "b360ab0d-3deb-4632-a8d5-c1639db9e9e2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2015660260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.226", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33c28bfca4da460e8ca96dc7519204c8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f35e69ef-c2c8-4b8c-9887-33e97b242c0a", "external-id": "nsx-vlan-transportzone-969", "segmentation_id": 969, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e054cb2-eb", "ovs_interfaceid": "9e054cb2-eb47-4dd3-8ec7-d8205d577337", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 948.233652] env[63345]: DEBUG oslo_vmware.api [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017505, 'name': Rename_Task, 'duration_secs': 1.020665} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.233950] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 49cf9c08-4024-40aa-9370-7b4f8d89e2cf] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 948.235636] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fde27c8c-5cfa-4f8e-9195-b36aa4ce548a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.245186] env[63345]: DEBUG oslo_vmware.api [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Waiting for the task: (returnval){ [ 948.245186] env[63345]: value = "task-1017507" [ 948.245186] env[63345]: _type = "Task" [ 948.245186] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.256655] env[63345]: DEBUG oslo_vmware.api [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017507, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.365169] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7f77341b-1128-4d01-9b14-16da3c64545c tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquiring lock "0da64b45-fa00-4fe8-8d1d-df586f27743f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 948.365700] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7f77341b-1128-4d01-9b14-16da3c64545c tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Lock "0da64b45-fa00-4fe8-8d1d-df586f27743f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 948.365809] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7f77341b-1128-4d01-9b14-16da3c64545c tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquiring lock "0da64b45-fa00-4fe8-8d1d-df586f27743f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 948.366223] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7f77341b-1128-4d01-9b14-16da3c64545c tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Lock "0da64b45-fa00-4fe8-8d1d-df586f27743f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 948.366726] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7f77341b-1128-4d01-9b14-16da3c64545c tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Lock "0da64b45-fa00-4fe8-8d1d-df586f27743f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 948.369083] env[63345]: INFO nova.compute.manager [None req-7f77341b-1128-4d01-9b14-16da3c64545c tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Terminating instance [ 948.407017] env[63345]: DEBUG nova.network.neutron [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Updating instance_info_cache with network_info: [{"id": "0cc6f455-5ad2-4802-a0ff-42268fe50023", "address": "fa:16:3e:61:01:ef", "network": {"id": "80bb8388-e130-46af-a4fc-1daea51d1bf5", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1343573007-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": 
"192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "964cee117b3c4601b3afe82a8bb9c23e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ddfb706a-add1-4e16-9ac4-d20b16a1df6d", "external-id": "nsx-vlan-transportzone-820", "segmentation_id": 820, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0cc6f455-5a", "ovs_interfaceid": "0cc6f455-5ad2-4802-a0ff-42268fe50023", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 948.514276] env[63345]: DEBUG nova.compute.manager [req-9d34107a-91b2-4a34-865b-26e4bd3b1a2d req-d8c9cd9c-8b4c-43de-af8c-10b0b40bab58 service nova] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Received event network-vif-deleted-0ca27a73-4f2c-47db-b68f-966110d6d772 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 948.523739] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017504, 'name': CreateVM_Task, 'duration_secs': 1.670578} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.523739] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a0eb9dae-0d27-419f-9210-eaa445e564c8] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 948.525198] env[63345]: DEBUG oslo_concurrency.lockutils [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 948.525198] env[63345]: DEBUG oslo_concurrency.lockutils [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 948.525449] env[63345]: DEBUG oslo_concurrency.lockutils [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 948.525504] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8d4d174f-6dc6-49a5-b457-1029da179c28 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.533070] env[63345]: DEBUG oslo_vmware.api [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Waiting for the task: (returnval){ [ 948.533070] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52a26b38-7784-75fc-b93d-7b33d1cb5a49" [ 948.533070] env[63345]: _type = "Task" [ 948.533070] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.542082] env[63345]: DEBUG oslo_vmware.api [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52a26b38-7784-75fc-b93d-7b33d1cb5a49, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.570584] env[63345]: DEBUG oslo_vmware.api [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1017506, 'name': ReconfigVM_Task, 'duration_secs': 0.381373} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.570809] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Reconfigured VM instance instance-00000020 to attach disk [datastore2] 070a834d-6478-4705-8df0-2a27c8780507/070a834d-6478-4705-8df0-2a27c8780507.vmdk or device None with type streamOptimized {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 948.571669] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a59d26ae-dfba-4d7f-8234-9e1d8be01a63 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.580312] env[63345]: DEBUG oslo_vmware.api [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Waiting for the task: (returnval){ [ 948.580312] env[63345]: value = "task-1017508" [ 948.580312] env[63345]: _type = "Task" [ 948.580312] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.590903] env[63345]: DEBUG oslo_vmware.api [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1017508, 'name': Rename_Task} progress is 5%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.677116] env[63345]: DEBUG oslo_concurrency.lockutils [None req-3fa755dc-bfd1-4cae-9cf9-c0d907f64b33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.922s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 948.681803] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b1aba700-aa3f-4db6-97fd-5ef1a47430df tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.943s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 948.682128] env[63345]: DEBUG nova.objects.instance [None req-b1aba700-aa3f-4db6-97fd-5ef1a47430df tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lazy-loading 'resources' on Instance uuid e5546a26-3f94-48a6-914a-2c37e63a0aeb {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 948.706262] env[63345]: INFO nova.scheduler.client.report [None req-3fa755dc-bfd1-4cae-9cf9-c0d907f64b33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Deleted allocations for instance dde93fd5-6312-4d91-b041-b7fc84b207d3 [ 948.726380] env[63345]: DEBUG oslo_concurrency.lockutils [None req-80f120a0-d136-4dad-a2ea-97acb42a180b tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Releasing lock "refresh_cache-0da64b45-fa00-4fe8-8d1d-df586f27743f" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 948.757450] env[63345]: DEBUG oslo_vmware.api [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017507, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.771373] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a5bc10cf-48f8-4606-9293-5c17b32da89a tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquiring lock "4868a0a0-ca35-44b0-a90c-124aa366af76" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 948.771373] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a5bc10cf-48f8-4606-9293-5c17b32da89a tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lock "4868a0a0-ca35-44b0-a90c-124aa366af76" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 948.877022] env[63345]: DEBUG nova.compute.manager [None req-7f77341b-1128-4d01-9b14-16da3c64545c tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Start destroying the instance on the hypervisor. 
{{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 948.877022] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-7f77341b-1128-4d01-9b14-16da3c64545c tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 948.877022] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b2fc3bf-3f6f-4bb3-a27c-fb7b7905a9fe {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.885018] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f77341b-1128-4d01-9b14-16da3c64545c tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 948.885382] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ef97553c-4566-4656-9565-93f1b42e2d16 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.894179] env[63345]: DEBUG oslo_vmware.api [None req-7f77341b-1128-4d01-9b14-16da3c64545c tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Waiting for the task: (returnval){ [ 948.894179] env[63345]: value = "task-1017509" [ 948.894179] env[63345]: _type = "Task" [ 948.894179] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.904250] env[63345]: DEBUG oslo_vmware.api [None req-7f77341b-1128-4d01-9b14-16da3c64545c tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017509, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.910662] env[63345]: DEBUG oslo_concurrency.lockutils [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Releasing lock "refresh_cache-0fe61754-458c-4c5c-bb2d-2677302e5fb9" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 949.048866] env[63345]: DEBUG oslo_vmware.api [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52a26b38-7784-75fc-b93d-7b33d1cb5a49, 'name': SearchDatastore_Task, 'duration_secs': 0.037056} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.051044] env[63345]: DEBUG oslo_concurrency.lockutils [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 949.051658] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: a0eb9dae-0d27-419f-9210-eaa445e564c8] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 949.052094] env[63345]: DEBUG oslo_concurrency.lockutils [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 949.052277] env[63345]: DEBUG oslo_concurrency.lockutils [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 949.052560] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 949.052847] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ac45e5be-c68b-4f58-8e2a-dd97e8e52813 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.064370] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 949.064741] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 949.067116] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c62fe436-168e-4b14-9fed-771dd2a9c2b7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.075878] env[63345]: DEBUG oslo_vmware.api [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Waiting for the task: (returnval){ [ 949.075878] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52ad6ab9-3f86-795a-40db-ab7353c25c13" [ 949.075878] env[63345]: _type = "Task" [ 949.075878] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.090641] env[63345]: DEBUG oslo_vmware.api [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52ad6ab9-3f86-795a-40db-ab7353c25c13, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.098363] env[63345]: DEBUG oslo_vmware.api [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1017508, 'name': Rename_Task, 'duration_secs': 0.188093} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.100160] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 949.100473] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bdd624f9-80fd-459b-8201-9c3eb3444d5a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.110919] env[63345]: DEBUG oslo_vmware.api [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Waiting for the task: (returnval){ [ 949.110919] env[63345]: value = "task-1017510" [ 949.110919] env[63345]: _type = "Task" [ 949.110919] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.123845] env[63345]: DEBUG oslo_vmware.api [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1017510, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.216072] env[63345]: DEBUG oslo_concurrency.lockutils [None req-3fa755dc-bfd1-4cae-9cf9-c0d907f64b33 tempest-ServerRescueTestJSON-1502120765 tempest-ServerRescueTestJSON-1502120765-project-member] Lock "dde93fd5-6312-4d91-b041-b7fc84b207d3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.339s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 949.233674] env[63345]: DEBUG oslo_concurrency.lockutils [None req-80f120a0-d136-4dad-a2ea-97acb42a180b tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Lock "interface-0da64b45-fa00-4fe8-8d1d-df586f27743f-2b931f56-815d-48ec-915d-c68e2ae0333f" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.135s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 949.265395] env[63345]: DEBUG oslo_vmware.api [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017507, 'name': PowerOnVM_Task, 'duration_secs': 0.782669} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.267797] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 49cf9c08-4024-40aa-9370-7b4f8d89e2cf] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 949.267974] env[63345]: INFO nova.compute.manager [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 49cf9c08-4024-40aa-9370-7b4f8d89e2cf] Took 12.87 seconds to spawn the instance on the hypervisor. [ 949.268176] env[63345]: DEBUG nova.compute.manager [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 49cf9c08-4024-40aa-9370-7b4f8d89e2cf] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 949.271924] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a1eb98e-e38d-420a-882a-d85d21ce6755 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.278463] env[63345]: DEBUG nova.compute.utils [None req-a5bc10cf-48f8-4606-9293-5c17b32da89a tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 949.412091] env[63345]: DEBUG oslo_vmware.api [None req-7f77341b-1128-4d01-9b14-16da3c64545c tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017509, 'name': PowerOffVM_Task, 'duration_secs': 0.267053} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.414596] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f77341b-1128-4d01-9b14-16da3c64545c tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 949.414804] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-7f77341b-1128-4d01-9b14-16da3c64545c tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 949.417344] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e1566dee-bd1c-43f8-a728-13b7d188406a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.501417] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efdf45df-0665-4722-b8b7-b3f8966f79fd {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.512561] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c670091-54c8-4e31-89c3-42c9dde59ae7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.559796] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c97a80d-acf2-48f5-8583-c2b426c6a3be {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.571327] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12a5b5a5-24a8-40f0-aa8a-2032205247ea {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.576614] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-7f77341b-1128-4d01-9b14-16da3c64545c tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 949.577088] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-7f77341b-1128-4d01-9b14-16da3c64545c tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 949.577391] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f77341b-1128-4d01-9b14-16da3c64545c tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Deleting the datastore file [datastore2] 0da64b45-fa00-4fe8-8d1d-df586f27743f {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 949.579847] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3b8df463-9e65-4c01-a2c7-967a261573d9 {{(pid=63345) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.597316] env[63345]: DEBUG nova.compute.provider_tree [None req-b1aba700-aa3f-4db6-97fd-5ef1a47430df tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 949.606203] env[63345]: DEBUG oslo_vmware.api [None req-7f77341b-1128-4d01-9b14-16da3c64545c tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Waiting for the task: (returnval){ [ 949.606203] env[63345]: value = "task-1017512" [ 949.606203] env[63345]: _type = "Task" [ 949.606203] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.606780] env[63345]: DEBUG oslo_vmware.api [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52ad6ab9-3f86-795a-40db-ab7353c25c13, 'name': SearchDatastore_Task, 'duration_secs': 0.011725} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.610835] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c42bfd10-b8d9-47fa-8c34-d5f3b143d0b2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.638883] env[63345]: DEBUG oslo_vmware.api [None req-7f77341b-1128-4d01-9b14-16da3c64545c tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017512, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.643282] env[63345]: DEBUG oslo_vmware.api [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Waiting for the task: (returnval){ [ 949.643282] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]5277b9e2-4347-1296-19bd-2ad263c9f8a0" [ 949.643282] env[63345]: _type = "Task" [ 949.643282] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.643594] env[63345]: DEBUG oslo_vmware.api [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1017510, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.653834] env[63345]: DEBUG oslo_vmware.api [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5277b9e2-4347-1296-19bd-2ad263c9f8a0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.784873] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a5bc10cf-48f8-4606-9293-5c17b32da89a tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lock "4868a0a0-ca35-44b0-a90c-124aa366af76" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.014s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 949.800701] env[63345]: INFO nova.compute.manager [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 49cf9c08-4024-40aa-9370-7b4f8d89e2cf] Took 36.99 seconds to build instance. [ 949.813548] env[63345]: DEBUG oslo_vmware.rw_handles [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Completed reading data from the image iterator. {{(pid=63345) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 949.813890] env[63345]: DEBUG oslo_vmware.rw_handles [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5232c0ab-16cb-bc96-20ad-4ea35537442f/disk-0.vmdk. {{(pid=63345) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 949.815467] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-956f1522-1fd3-4695-8013-02e202d8ff22 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.824565] env[63345]: DEBUG oslo_vmware.rw_handles [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5232c0ab-16cb-bc96-20ad-4ea35537442f/disk-0.vmdk is in state: ready. {{(pid=63345) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 949.824764] env[63345]: DEBUG oslo_vmware.rw_handles [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5232c0ab-16cb-bc96-20ad-4ea35537442f/disk-0.vmdk. 
{{(pid=63345) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 949.825042] env[63345]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-cd90fd2a-1550-4aa7-975d-f0988d845039 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.108170] env[63345]: DEBUG nova.scheduler.client.report [None req-b1aba700-aa3f-4db6-97fd-5ef1a47430df tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 950.122626] env[63345]: DEBUG oslo_vmware.api [None req-7f77341b-1128-4d01-9b14-16da3c64545c tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017512, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.279098} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.123515] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f77341b-1128-4d01-9b14-16da3c64545c tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 950.123620] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-7f77341b-1128-4d01-9b14-16da3c64545c tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 950.123744] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-7f77341b-1128-4d01-9b14-16da3c64545c tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 950.123968] env[63345]: INFO nova.compute.manager [None req-7f77341b-1128-4d01-9b14-16da3c64545c tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Took 1.25 seconds to destroy the instance on the hypervisor. [ 950.124245] env[63345]: DEBUG oslo.service.loopingcall [None req-7f77341b-1128-4d01-9b14-16da3c64545c tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 950.124451] env[63345]: DEBUG nova.compute.manager [-] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 950.124544] env[63345]: DEBUG nova.network.neutron [-] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 950.130360] env[63345]: DEBUG oslo_vmware.api [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1017510, 'name': PowerOnVM_Task, 'duration_secs': 0.64791} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.130910] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 950.156811] env[63345]: DEBUG oslo_vmware.api [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5277b9e2-4347-1296-19bd-2ad263c9f8a0, 'name': SearchDatastore_Task, 'duration_secs': 0.033903} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.157157] env[63345]: DEBUG oslo_concurrency.lockutils [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 950.157635] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] a0eb9dae-0d27-419f-9210-eaa445e564c8/a0eb9dae-0d27-419f-9210-eaa445e564c8.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 950.157754] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c99198f1-6952-4e9a-b6b0-a2727a18f6b0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.165816] env[63345]: DEBUG oslo_vmware.api [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Waiting for the task: (returnval){ [ 950.165816] env[63345]: value = "task-1017513" [ 950.165816] env[63345]: _type = "Task" [ 950.165816] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.176837] env[63345]: DEBUG oslo_vmware.api [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017513, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.210948] env[63345]: DEBUG neutronclient.v2_0.client [-] Error message: {"NeutronError": {"type": "PortNotFound", "message": "Port 0ca27a73-4f2c-47db-b68f-966110d6d772 could not be found.", "detail": ""}} {{(pid=63345) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 950.211317] env[63345]: DEBUG nova.network.neutron [-] Unable to show port 0ca27a73-4f2c-47db-b68f-966110d6d772 as it no longer exists. {{(pid=63345) _unbind_ports /opt/stack/nova/nova/network/neutron.py:666}} [ 950.256712] env[63345]: DEBUG nova.compute.manager [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 950.258238] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95c36267-9286-496a-be5d-9c7b19728975 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.302504] env[63345]: DEBUG oslo_concurrency.lockutils [None req-da97615d-0381-42b0-88cd-70ad732c54c9 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Lock "49cf9c08-4024-40aa-9370-7b4f8d89e2cf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.507s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 950.317012] env[63345]: DEBUG oslo_vmware.rw_handles [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5232c0ab-16cb-bc96-20ad-4ea35537442f/disk-0.vmdk. 
{{(pid=63345) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 950.317388] env[63345]: INFO nova.virt.vmwareapi.images [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Downloaded image file data 487d34b5-b85c-4d35-8b15-fd7347b3dcfd [ 950.318474] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e549cbc8-4edb-438f-ba66-db12657c42c1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.339331] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d9a9c826-d0f2-40d4-8f0e-83b3212a3481 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.369589] env[63345]: INFO nova.virt.vmwareapi.images [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] The imported VM was unregistered [ 950.372260] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Caching image {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 950.372510] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Creating directory with path [datastore2] devstack-image-cache_base/487d34b5-b85c-4d35-8b15-fd7347b3dcfd {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 950.372858] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-901e7b26-52b2-4fe4-b70a-8288e04fd654 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.390031] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Created directory with path [datastore2] devstack-image-cache_base/487d34b5-b85c-4d35-8b15-fd7347b3dcfd {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 950.390319] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_6b9edf2c-e274-4057-9764-29012b7e5e4c/OSTACK_IMG_6b9edf2c-e274-4057-9764-29012b7e5e4c.vmdk to [datastore2] devstack-image-cache_base/487d34b5-b85c-4d35-8b15-fd7347b3dcfd/487d34b5-b85c-4d35-8b15-fd7347b3dcfd.vmdk. 
{{(pid=63345) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 950.390803] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-adb43802-f163-40f5-a7dc-c3a96d1705ac {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.402601] env[63345]: DEBUG oslo_vmware.api [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Waiting for the task: (returnval){ [ 950.402601] env[63345]: value = "task-1017515" [ 950.402601] env[63345]: _type = "Task" [ 950.402601] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.418026] env[63345]: DEBUG oslo_vmware.api [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017515, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.432965] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-268031c2-91e3-4e78-92d9-7629be73a279 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.455082] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Updating instance '0fe61754-458c-4c5c-bb2d-2677302e5fb9' progress to 0 {{(pid=63345) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 950.616014] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b1aba700-aa3f-4db6-97fd-5ef1a47430df tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.934s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 950.621523] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.107s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 950.623315] env[63345]: INFO nova.compute.claims [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 950.667527] env[63345]: INFO nova.scheduler.client.report [None req-b1aba700-aa3f-4db6-97fd-5ef1a47430df tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Deleted allocations for instance e5546a26-3f94-48a6-914a-2c37e63a0aeb [ 950.686872] env[63345]: DEBUG oslo_vmware.api [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': 
task-1017513, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.787022] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5e17d4f9-249b-4bce-a31d-a0d51b5edde3 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Lock "070a834d-6478-4705-8df0-2a27c8780507" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 48.479s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 950.892282] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a5bc10cf-48f8-4606-9293-5c17b32da89a tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquiring lock "4868a0a0-ca35-44b0-a90c-124aa366af76" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 950.892513] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a5bc10cf-48f8-4606-9293-5c17b32da89a tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lock "4868a0a0-ca35-44b0-a90c-124aa366af76" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 950.892772] env[63345]: INFO nova.compute.manager [None req-a5bc10cf-48f8-4606-9293-5c17b32da89a tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Attaching volume f3eb7f29-d3fd-4c7d-ab93-1582eb175324 to /dev/sdb [ 950.962768] env[63345]: DEBUG oslo_vmware.api [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017515, 'name': MoveVirtualDisk_Task} progress is 9%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.962768] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7278cfca-8de0-4cff-a28a-fbff994fcfb7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.962768] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 950.963424] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-324d8e9b-104d-4374-8165-a3fe6861667a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.973250] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2b63c75-876d-4703-b4f5-7ec4901702cf {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.976272] env[63345]: DEBUG oslo_vmware.api [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Waiting for the task: (returnval){ [ 950.976272] env[63345]: value = "task-1017516" [ 950.976272] env[63345]: _type = "Task" [ 950.976272] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.987816] env[63345]: DEBUG oslo_vmware.api [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017516, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.995618] env[63345]: DEBUG nova.virt.block_device [None req-a5bc10cf-48f8-4606-9293-5c17b32da89a tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Updating existing volume attachment record: 0b7dec99-b6d5-43b3-8d68-9a0f003fca44 {{(pid=63345) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 951.187194] env[63345]: DEBUG oslo_vmware.api [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017513, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.666614} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.188645] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b1aba700-aa3f-4db6-97fd-5ef1a47430df tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lock "e5546a26-3f94-48a6-914a-2c37e63a0aeb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.617s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 951.189816] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] a0eb9dae-0d27-419f-9210-eaa445e564c8/a0eb9dae-0d27-419f-9210-eaa445e564c8.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 951.190059] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: a0eb9dae-0d27-419f-9210-eaa445e564c8] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 951.193313] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ddcd6e31-09bb-46fc-ba32-9e35fe14aa16 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.202986] env[63345]: DEBUG oslo_vmware.api [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Waiting for the task: (returnval){ [ 951.202986] env[63345]: value = "task-1017517" [ 951.202986] env[63345]: _type = "Task" [ 951.202986] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.220038] env[63345]: DEBUG oslo_vmware.api [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017517, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.422471] env[63345]: DEBUG oslo_vmware.api [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017515, 'name': MoveVirtualDisk_Task} progress is 24%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.489985] env[63345]: DEBUG oslo_vmware.api [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017516, 'name': PowerOffVM_Task, 'duration_secs': 0.253629} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.489985] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 951.489985] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Updating instance '0fe61754-458c-4c5c-bb2d-2677302e5fb9' progress to 17 {{(pid=63345) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 951.721420] env[63345]: DEBUG oslo_vmware.api [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017517, 'name': ExtendVirtualDisk_Task} progress is 50%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.917422] env[63345]: DEBUG oslo_vmware.api [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017515, 'name': MoveVirtualDisk_Task} progress is 43%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.933653] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe390a0c-34a2-4ddd-893c-cbbe789cc479 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.945460] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0693032-6c1d-4428-9fb2-0d33aee36039 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.987103] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bbd3aa4-3607-4755-9191-fd59237cdc40 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.996942] env[63345]: DEBUG nova.virt.hardware [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 951.998027] env[63345]: DEBUG nova.virt.hardware [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 tempest-ServerDiskConfigTestJSON-2090373809 
tempest-ServerDiskConfigTestJSON-2090373809-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 951.998027] env[63345]: DEBUG nova.virt.hardware [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 951.998027] env[63345]: DEBUG nova.virt.hardware [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 951.998027] env[63345]: DEBUG nova.virt.hardware [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 951.998545] env[63345]: DEBUG nova.virt.hardware [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 951.998797] env[63345]: DEBUG nova.virt.hardware [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 951.998974] env[63345]: DEBUG nova.virt.hardware [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 951.999243] env[63345]: DEBUG nova.virt.hardware [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 951.999469] env[63345]: DEBUG nova.virt.hardware [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 951.999676] env[63345]: DEBUG nova.virt.hardware [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 952.008692] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d85c7fb5-1321-499c-8b57-33d023a7afd1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.022543] env[63345]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62621b80-de32-4c85-933e-3ea8672a5664 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.045816] env[63345]: DEBUG nova.compute.provider_tree [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 952.048886] env[63345]: DEBUG oslo_vmware.api [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Waiting for the task: (returnval){ [ 952.048886] env[63345]: value = "task-1017521" [ 952.048886] env[63345]: _type = "Task" [ 952.048886] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.064853] env[63345]: DEBUG oslo_vmware.api [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017521, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.089030] env[63345]: DEBUG nova.compute.manager [req-0827b67f-0dab-4f6e-a899-2295278c1a20 req-a1dca79a-50ef-480e-be53-9c91eb22f262 service nova] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Received event network-vif-deleted-9e054cb2-eb47-4dd3-8ec7-d8205d577337 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 952.089030] env[63345]: INFO nova.compute.manager [req-0827b67f-0dab-4f6e-a899-2295278c1a20 req-a1dca79a-50ef-480e-be53-9c91eb22f262 service nova] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Neutron deleted interface 9e054cb2-eb47-4dd3-8ec7-d8205d577337; detaching it from the instance and deleting it from the info cache [ 952.089030] env[63345]: DEBUG nova.network.neutron [req-0827b67f-0dab-4f6e-a899-2295278c1a20 req-a1dca79a-50ef-480e-be53-9c91eb22f262 service nova] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 952.219907] env[63345]: DEBUG oslo_vmware.api [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017517, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.624842} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.219907] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: a0eb9dae-0d27-419f-9210-eaa445e564c8] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 952.220550] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ecb2fb6-ccad-4f7b-997b-4c951b2e4936 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.248390] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: a0eb9dae-0d27-419f-9210-eaa445e564c8] Reconfiguring VM instance instance-0000005b to attach disk [datastore2] a0eb9dae-0d27-419f-9210-eaa445e564c8/a0eb9dae-0d27-419f-9210-eaa445e564c8.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 952.249474] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ada682d7-f3ca-42e9-80e6-387b52eb0911 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.274859] env[63345]: DEBUG oslo_vmware.api [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Waiting for the task: (returnval){ [ 952.274859] env[63345]: value = "task-1017522" [ 952.274859] env[63345]: _type = "Task" [ 952.274859] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.289581] env[63345]: DEBUG nova.network.neutron [-] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 952.291275] env[63345]: DEBUG oslo_vmware.api [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017522, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.420910] env[63345]: DEBUG oslo_vmware.api [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017515, 'name': MoveVirtualDisk_Task} progress is 60%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.550925] env[63345]: DEBUG nova.scheduler.client.report [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 952.578408] env[63345]: DEBUG oslo_vmware.api [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017521, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.591213] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-49dfd3eb-135a-41ff-9f58-d616e65aba6a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.604554] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-177af066-9913-40c6-b336-251defe6a168 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.663720] env[63345]: DEBUG nova.compute.manager [req-0827b67f-0dab-4f6e-a899-2295278c1a20 req-a1dca79a-50ef-480e-be53-9c91eb22f262 service nova] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Detach interface failed, port_id=9e054cb2-eb47-4dd3-8ec7-d8205d577337, reason: Instance 0da64b45-fa00-4fe8-8d1d-df586f27743f could not be found. 
{{(pid=63345) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 952.667538] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a1f2b856-da93-440b-8c8e-b899d0f507c7 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquiring lock "49cf9c08-4024-40aa-9370-7b4f8d89e2cf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 952.667863] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a1f2b856-da93-440b-8c8e-b899d0f507c7 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Lock "49cf9c08-4024-40aa-9370-7b4f8d89e2cf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 952.668229] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a1f2b856-da93-440b-8c8e-b899d0f507c7 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquiring lock "49cf9c08-4024-40aa-9370-7b4f8d89e2cf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 952.668323] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a1f2b856-da93-440b-8c8e-b899d0f507c7 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Lock "49cf9c08-4024-40aa-9370-7b4f8d89e2cf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 952.668506] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a1f2b856-da93-440b-8c8e-b899d0f507c7 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Lock "49cf9c08-4024-40aa-9370-7b4f8d89e2cf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 952.671043] env[63345]: INFO nova.compute.manager [None req-a1f2b856-da93-440b-8c8e-b899d0f507c7 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 49cf9c08-4024-40aa-9370-7b4f8d89e2cf] Terminating instance [ 952.790530] env[63345]: DEBUG oslo_vmware.api [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017522, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.793343] env[63345]: INFO nova.compute.manager [-] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Took 2.67 seconds to deallocate network for instance. [ 952.918790] env[63345]: DEBUG oslo_vmware.api [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017515, 'name': MoveVirtualDisk_Task} progress is 80%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.066094] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.446s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 953.066730] env[63345]: DEBUG nova.compute.manager [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] Start building networks asynchronously for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 953.073766] env[63345]: DEBUG oslo_concurrency.lockutils [None req-66582d54-53eb-4fcf-bbbe-9e58bb4cf0e8 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.717s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 953.074084] env[63345]: DEBUG nova.objects.instance [None req-66582d54-53eb-4fcf-bbbe-9e58bb4cf0e8 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Lazy-loading 'resources' on Instance uuid b5173471-3367-42ba-b450-62ad8573f048 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 953.075214] env[63345]: DEBUG oslo_vmware.api [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017521, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.179513] env[63345]: DEBUG nova.compute.manager [None req-a1f2b856-da93-440b-8c8e-b899d0f507c7 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 49cf9c08-4024-40aa-9370-7b4f8d89e2cf] Start destroying the instance on the hypervisor. 
{{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 953.179944] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a1f2b856-da93-440b-8c8e-b899d0f507c7 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 49cf9c08-4024-40aa-9370-7b4f8d89e2cf] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 953.180918] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0adda85c-6452-4bfc-acf7-c3588f6b05d6 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.191931] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1f2b856-da93-440b-8c8e-b899d0f507c7 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 49cf9c08-4024-40aa-9370-7b4f8d89e2cf] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 953.192271] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2dacbd90-704a-4c2c-9832-6eeb08f6dfd7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.203296] env[63345]: DEBUG oslo_vmware.api [None req-a1f2b856-da93-440b-8c8e-b899d0f507c7 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Waiting for the task: (returnval){ [ 953.203296] env[63345]: value = "task-1017523" [ 953.203296] env[63345]: _type = "Task" [ 953.203296] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.216126] env[63345]: DEBUG oslo_vmware.api [None req-a1f2b856-da93-440b-8c8e-b899d0f507c7 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017523, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.292768] env[63345]: DEBUG oslo_vmware.api [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017522, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.301380] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7f77341b-1128-4d01-9b14-16da3c64545c tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 953.418761] env[63345]: DEBUG oslo_vmware.api [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017515, 'name': MoveVirtualDisk_Task} progress is 100%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.571628] env[63345]: DEBUG oslo_vmware.api [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017521, 'name': ReconfigVM_Task, 'duration_secs': 1.32244} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.574561] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Updating instance '0fe61754-458c-4c5c-bb2d-2677302e5fb9' progress to 33 {{(pid=63345) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 953.577946] env[63345]: DEBUG nova.compute.utils [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 953.579865] env[63345]: DEBUG nova.compute.manager [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] Allocating IP information in the background. {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 953.580133] env[63345]: DEBUG nova.network.neutron [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 953.637556] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19e9c1ba-5513-460b-b299-eb8fc921a654 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.648393] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-d74761b3-1a18-47af-b678-06d3abb9ad64 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Suspending the VM {{(pid=63345) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1163}} [ 953.648959] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-c4531c95-5c2e-4499-9365-80c1eff03724 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.658058] env[63345]: DEBUG oslo_vmware.api [None req-d74761b3-1a18-47af-b678-06d3abb9ad64 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Waiting for the task: (returnval){ [ 953.658058] env[63345]: value = "task-1017524" [ 953.658058] env[63345]: _type = "Task" [ 953.658058] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.668646] env[63345]: DEBUG nova.policy [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd758ccdfee1a4d2fa297226106958b66', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '68de4d93c3db4387a4d13c86cc1c77db', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 953.674131] env[63345]: DEBUG oslo_vmware.api [None req-d74761b3-1a18-47af-b678-06d3abb9ad64 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1017524, 'name': SuspendVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.713499] env[63345]: DEBUG oslo_vmware.api [None req-a1f2b856-da93-440b-8c8e-b899d0f507c7 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017523, 'name': PowerOffVM_Task, 'duration_secs': 0.379302} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.716085] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1f2b856-da93-440b-8c8e-b899d0f507c7 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 49cf9c08-4024-40aa-9370-7b4f8d89e2cf] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 953.716286] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a1f2b856-da93-440b-8c8e-b899d0f507c7 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 49cf9c08-4024-40aa-9370-7b4f8d89e2cf] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 953.716721] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a6b61686-d951-4302-9b52-785468aea1d3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.790097] env[63345]: DEBUG oslo_vmware.api [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017522, 'name': ReconfigVM_Task, 'duration_secs': 1.430209} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.790097] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: a0eb9dae-0d27-419f-9210-eaa445e564c8] Reconfigured VM instance instance-0000005b to attach disk [datastore2] a0eb9dae-0d27-419f-9210-eaa445e564c8/a0eb9dae-0d27-419f-9210-eaa445e564c8.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 953.790097] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b25c1094-1bc3-4b39-a7ee-eb8d5ce4b007 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.795840] env[63345]: DEBUG oslo_vmware.api [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Waiting for the task: (returnval){ [ 953.795840] env[63345]: value = "task-1017526" [ 953.795840] env[63345]: _type = "Task" [ 953.795840] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.797688] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a1f2b856-da93-440b-8c8e-b899d0f507c7 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 49cf9c08-4024-40aa-9370-7b4f8d89e2cf] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 953.797793] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a1f2b856-da93-440b-8c8e-b899d0f507c7 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 49cf9c08-4024-40aa-9370-7b4f8d89e2cf] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 953.798052] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-a1f2b856-da93-440b-8c8e-b899d0f507c7 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Deleting the datastore file [datastore2] 49cf9c08-4024-40aa-9370-7b4f8d89e2cf {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 953.803876] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bda57e3f-bab5-482e-a24f-ad870a027128 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.813221] env[63345]: DEBUG oslo_vmware.api [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017526, 'name': Rename_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.814906] env[63345]: DEBUG oslo_vmware.api [None req-a1f2b856-da93-440b-8c8e-b899d0f507c7 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Waiting for the task: (returnval){ [ 953.814906] env[63345]: value = "task-1017527" [ 953.814906] env[63345]: _type = "Task" [ 953.814906] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.823319] env[63345]: DEBUG oslo_vmware.api [None req-a1f2b856-da93-440b-8c8e-b899d0f507c7 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017527, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.864018] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddba7e14-73ef-4ed0-b774-d84b7229257e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.872184] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd85a5b4-8950-4381-a47d-794af1424389 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.915531] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45bdb2da-277d-4d3a-b17a-58d356931a5e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.927926] env[63345]: DEBUG oslo_vmware.api [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017515, 'name': MoveVirtualDisk_Task, 'duration_secs': 3.109182} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.927926] env[63345]: INFO nova.virt.vmwareapi.ds_util [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_6b9edf2c-e274-4057-9764-29012b7e5e4c/OSTACK_IMG_6b9edf2c-e274-4057-9764-29012b7e5e4c.vmdk to [datastore2] devstack-image-cache_base/487d34b5-b85c-4d35-8b15-fd7347b3dcfd/487d34b5-b85c-4d35-8b15-fd7347b3dcfd.vmdk. 
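Editorial note: the MoveVirtualDisk_Task records above show the pattern that recurs throughout this trace: a long-running vSphere task is created, then repeatedly polled ("progress is 80% ... 100%") until it reports completion, at which point the duration is logged. The sketch below illustrates that poll-until-done loop in Python; `get_task_info()` and the `TaskInfo` shape are hypothetical stand-ins for whatever RetrieveProperties-style call fetches task state, not the oslo_vmware or pyVmomi APIs.

```python
# Minimal sketch of the poll-until-complete pattern seen in the
# wait_for_task / _poll_task entries above ("progress is N% ...",
# "completed successfully"). get_task_info() and TaskInfo are
# hypothetical stand-ins, not a real vSphere client API.
import time
from dataclasses import dataclass


@dataclass
class TaskInfo:
    state: str            # 'running', 'success' or 'error'
    progress: int         # 0-100
    error: str | None = None


def wait_for_task(get_task_info, task_id, interval=0.5, timeout=600.0):
    """Poll a long-running task (e.g. a MoveVirtualDisk-style task) until it finishes."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info(task_id)   # one property-retrieval round trip per cycle
        if info.state == 'success':
            return info
        if info.state == 'error':
            raise RuntimeError(f"task {task_id} failed: {info.error}")
        # the DEBUG lines above log exactly this intermediate progress value
        print(f"Task {task_id} progress is {info.progress}%.")
        time.sleep(interval)
    raise TimeoutError(f"task {task_id} did not complete within {timeout}s")
```

In the trace itself the analogous loop is what emits the "progress is N%." DEBUG lines from oslo_vmware's api.py each cycle, followed by a final record carrying duration_secs once the task returns successfully.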
[ 953.928505] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Cleaning up location [datastore2] OSTACK_IMG_6b9edf2c-e274-4057-9764-29012b7e5e4c {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 953.928751] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_6b9edf2c-e274-4057-9764-29012b7e5e4c {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 953.930406] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ece0a4da-a952-4de2-b2fa-64cb31f750c0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.935704] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7c135013-335b-4016-a1ae-9a7e4981741d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.950825] env[63345]: DEBUG nova.compute.provider_tree [None req-66582d54-53eb-4fcf-bbbe-9e58bb4cf0e8 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 953.956148] env[63345]: DEBUG oslo_vmware.api [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Waiting for the task: (returnval){ [ 953.956148] env[63345]: value = "task-1017529" [ 953.956148] env[63345]: _type = "Task" [ 953.956148] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.965320] env[63345]: DEBUG oslo_vmware.api [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017529, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.082323] env[63345]: DEBUG nova.virt.hardware [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 954.082606] env[63345]: DEBUG nova.virt.hardware [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 954.082796] env[63345]: DEBUG nova.virt.hardware [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 954.082990] env[63345]: DEBUG nova.virt.hardware [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 954.083179] env[63345]: DEBUG nova.virt.hardware [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 954.083347] env[63345]: DEBUG nova.virt.hardware [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 954.083559] env[63345]: DEBUG nova.virt.hardware [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 954.083747] env[63345]: DEBUG nova.virt.hardware [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 954.084252] env[63345]: DEBUG nova.virt.hardware [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 tempest-ServerDiskConfigTestJSON-2090373809 
tempest-ServerDiskConfigTestJSON-2090373809-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 954.084411] env[63345]: DEBUG nova.virt.hardware [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 954.084648] env[63345]: DEBUG nova.virt.hardware [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 954.091166] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Reconfiguring VM instance instance-00000058 to detach disk 2000 {{(pid=63345) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 954.092158] env[63345]: DEBUG nova.compute.manager [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] Start building block device mappings for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 954.094495] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c928520c-66f4-4eeb-90eb-27854ee31197 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.115493] env[63345]: DEBUG nova.network.neutron [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] Successfully created port: eaf797ae-2e07-4553-aaab-deed7e3f45a1 {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 954.117936] env[63345]: DEBUG oslo_vmware.api [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Waiting for the task: (returnval){ [ 954.117936] env[63345]: value = "task-1017530" [ 954.117936] env[63345]: _type = "Task" [ 954.117936] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.130356] env[63345]: DEBUG oslo_vmware.api [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017530, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.173430] env[63345]: DEBUG oslo_vmware.api [None req-d74761b3-1a18-47af-b678-06d3abb9ad64 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1017524, 'name': SuspendVM_Task} progress is 58%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.307578] env[63345]: DEBUG oslo_vmware.api [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017526, 'name': Rename_Task, 'duration_secs': 0.300105} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.307874] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: a0eb9dae-0d27-419f-9210-eaa445e564c8] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 954.308230] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e8064d65-6229-4a4f-95a3-f1a24cfff757 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.315573] env[63345]: DEBUG oslo_vmware.api [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Waiting for the task: (returnval){ [ 954.315573] env[63345]: value = "task-1017531" [ 954.315573] env[63345]: _type = "Task" [ 954.315573] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.328019] env[63345]: DEBUG oslo_vmware.api [None req-a1f2b856-da93-440b-8c8e-b899d0f507c7 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017527, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.313755} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.331561] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-a1f2b856-da93-440b-8c8e-b899d0f507c7 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 954.331942] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a1f2b856-da93-440b-8c8e-b899d0f507c7 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 49cf9c08-4024-40aa-9370-7b4f8d89e2cf] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 954.332262] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a1f2b856-da93-440b-8c8e-b899d0f507c7 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 49cf9c08-4024-40aa-9370-7b4f8d89e2cf] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 954.332575] env[63345]: INFO nova.compute.manager [None req-a1f2b856-da93-440b-8c8e-b899d0f507c7 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 49cf9c08-4024-40aa-9370-7b4f8d89e2cf] Took 1.15 seconds to destroy the instance on the hypervisor. 
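Editorial note: the records for instance 49cf9c08-4024-40aa-9370-7b4f8d89e2cf trace a fixed teardown order: power off the VM, unregister it, delete its datastore contents, and only then deallocate Neutron ports. The self-contained sketch below mirrors that ordering under stated assumptions; `FakeVCenterSession` and its method names are illustrative stand-ins, not Nova's or oslo.vmware's real interfaces.

```python
# Condensed, self-contained sketch of the destroy sequence logged above.
# All class and method names are illustrative assumptions.
import time
from typing import Callable


class FakeVCenterSession:
    """Stub standing in for a vCenter session; each call returns immediately."""

    def power_off_vm(self, vm_ref: str) -> None:
        print(f"PowerOffVM_Task for {vm_ref} completed")

    def unregister_vm(self, vm_ref: str) -> None:
        print(f"Unregistered {vm_ref}")

    def delete_datastore_file(self, path: str) -> None:
        print(f"DeleteDatastoreFile_Task for {path} completed")


def destroy_instance(session: FakeVCenterSession,
                     deallocate_network: Callable[[str], None],
                     instance_uuid: str,
                     datastore_dir: str) -> float:
    """Mirror the ordering in the log: hypervisor cleanup first, networking last."""
    start = time.monotonic()
    session.power_off_vm(instance_uuid)            # "Powering off the VM"
    session.unregister_vm(instance_uuid)           # "Unregistering the VM"
    session.delete_datastore_file(datastore_dir)   # "Deleting the datastore file"
    elapsed = time.monotonic() - start             # "Took N.NN seconds to destroy ..."
    deallocate_network(instance_uuid)              # "Deallocating network for instance"
    return elapsed


if __name__ == "__main__":
    took = destroy_instance(
        FakeVCenterSession(),
        lambda uuid: print(f"deallocate_for_instance({uuid})"),
        "49cf9c08-4024-40aa-9370-7b4f8d89e2cf",
        "[datastore2] 49cf9c08-4024-40aa-9370-7b4f8d89e2cf",
    )
    print(f"Took {took:.2f} seconds to destroy the instance on the hypervisor.")
```

The elapsed time is measured before the network step because, in the trace, "Deallocating network for instance" only begins after the "Took 1.15 seconds to destroy the instance on the hypervisor" message has been logged.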
[ 954.332938] env[63345]: DEBUG oslo.service.loopingcall [None req-a1f2b856-da93-440b-8c8e-b899d0f507c7 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 954.333306] env[63345]: DEBUG oslo_vmware.api [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017531, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.333622] env[63345]: DEBUG nova.compute.manager [-] [instance: 49cf9c08-4024-40aa-9370-7b4f8d89e2cf] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 954.333806] env[63345]: DEBUG nova.network.neutron [-] [instance: 49cf9c08-4024-40aa-9370-7b4f8d89e2cf] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 954.457010] env[63345]: DEBUG nova.scheduler.client.report [None req-66582d54-53eb-4fcf-bbbe-9e58bb4cf0e8 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 954.471705] env[63345]: DEBUG oslo_vmware.api [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017529, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.086105} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.472093] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 954.472285] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Releasing lock "[datastore2] devstack-image-cache_base/487d34b5-b85c-4d35-8b15-fd7347b3dcfd/487d34b5-b85c-4d35-8b15-fd7347b3dcfd.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 954.472559] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/487d34b5-b85c-4d35-8b15-fd7347b3dcfd/487d34b5-b85c-4d35-8b15-fd7347b3dcfd.vmdk to [datastore2] 9aa651b8-317d-4153-8c33-9df0a5d16115/9aa651b8-317d-4153-8c33-9df0a5d16115.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 954.472881] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4e41e4b0-fa0c-4e19-b5ea-ae104d385fd4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.482928] env[63345]: DEBUG oslo_vmware.api [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Waiting for the task: (returnval){ [ 954.482928] env[63345]: value = "task-1017532" [ 954.482928] env[63345]: _type = "Task" [ 954.482928] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.495262] env[63345]: DEBUG oslo_vmware.api [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017532, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.631459] env[63345]: DEBUG oslo_vmware.api [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017530, 'name': ReconfigVM_Task, 'duration_secs': 0.187643} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.631459] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Reconfigured VM instance instance-00000058 to detach disk 2000 {{(pid=63345) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 954.633151] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4970ad83-93ec-4fb6-b5a0-08c8cfe63f03 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.662584] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Reconfiguring VM instance instance-00000058 to attach disk [datastore2] 0fe61754-458c-4c5c-bb2d-2677302e5fb9/0fe61754-458c-4c5c-bb2d-2677302e5fb9.vmdk or device None with type thin {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 954.663815] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2663e6a8-4259-4113-b102-4995c42c225a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.693557] env[63345]: DEBUG oslo_vmware.api [None req-d74761b3-1a18-47af-b678-06d3abb9ad64 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1017524, 'name': SuspendVM_Task, 'duration_secs': 0.772901} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.695079] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-d74761b3-1a18-47af-b678-06d3abb9ad64 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Suspended the VM {{(pid=63345) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1167}} [ 954.695549] env[63345]: DEBUG nova.compute.manager [None req-d74761b3-1a18-47af-b678-06d3abb9ad64 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 954.696329] env[63345]: DEBUG oslo_vmware.api [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Waiting for the task: (returnval){ [ 954.696329] env[63345]: value = "task-1017533" [ 954.696329] env[63345]: _type = "Task" [ 954.696329] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.697391] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6645f59-7da9-4f6a-bb27-bfb92065ab5b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.714754] env[63345]: DEBUG oslo_vmware.api [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017533, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.719481] env[63345]: DEBUG oslo_concurrency.lockutils [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquiring lock "732ac30c-15c1-4c57-bb70-ea3ed51f646b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 954.719731] env[63345]: DEBUG oslo_concurrency.lockutils [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lock "732ac30c-15c1-4c57-bb70-ea3ed51f646b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 954.830281] env[63345]: DEBUG oslo_vmware.api [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017531, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.902893] env[63345]: DEBUG nova.compute.manager [req-0733a161-1173-44bd-960f-9e52e0f60e5f req-9bccab0e-fbf8-4e15-9be6-d8abfbc6f03a service nova] [instance: 49cf9c08-4024-40aa-9370-7b4f8d89e2cf] Received event network-vif-deleted-065f6e50-8edf-4eac-a2e3-d944aa6f33ba {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 954.903148] env[63345]: INFO nova.compute.manager [req-0733a161-1173-44bd-960f-9e52e0f60e5f req-9bccab0e-fbf8-4e15-9be6-d8abfbc6f03a service nova] [instance: 49cf9c08-4024-40aa-9370-7b4f8d89e2cf] Neutron deleted interface 065f6e50-8edf-4eac-a2e3-d944aa6f33ba; detaching it from the instance and deleting it from the info cache [ 954.903451] env[63345]: DEBUG nova.network.neutron [req-0733a161-1173-44bd-960f-9e52e0f60e5f req-9bccab0e-fbf8-4e15-9be6-d8abfbc6f03a service nova] [instance: 49cf9c08-4024-40aa-9370-7b4f8d89e2cf] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 954.967433] env[63345]: DEBUG oslo_concurrency.lockutils [None req-66582d54-53eb-4fcf-bbbe-9e58bb4cf0e8 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.894s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 954.971064] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7f77341b-1128-4d01-9b14-16da3c64545c tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.670s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 954.971467] env[63345]: DEBUG nova.objects.instance [None req-7f77341b-1128-4d01-9b14-16da3c64545c tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Lazy-loading 'resources' on Instance uuid 0da64b45-fa00-4fe8-8d1d-df586f27743f {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 954.997088] env[63345]: DEBUG oslo_vmware.api [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017532, 'name': CopyVirtualDisk_Task} progress is 9%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.003618] env[63345]: INFO nova.scheduler.client.report [None req-66582d54-53eb-4fcf-bbbe-9e58bb4cf0e8 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Deleted allocations for instance b5173471-3367-42ba-b450-62ad8573f048 [ 955.120304] env[63345]: DEBUG nova.compute.manager [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] Start spawning the instance on the hypervisor. 
{{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 955.148137] env[63345]: DEBUG nova.virt.hardware [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 955.148417] env[63345]: DEBUG nova.virt.hardware [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 955.148583] env[63345]: DEBUG nova.virt.hardware [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 955.148768] env[63345]: DEBUG nova.virt.hardware [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 955.148923] env[63345]: DEBUG nova.virt.hardware [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 955.149305] env[63345]: DEBUG nova.virt.hardware [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 955.149598] env[63345]: DEBUG nova.virt.hardware [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 955.149870] env[63345]: DEBUG nova.virt.hardware [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 955.150110] env[63345]: DEBUG nova.virt.hardware [None 
req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 955.150335] env[63345]: DEBUG nova.virt.hardware [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 955.150553] env[63345]: DEBUG nova.virt.hardware [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 955.151532] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3307491d-f5f5-46d2-9b87-3e1f9a9f861e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.161940] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6211d58c-aca0-4ae4-a772-dd2acd244213 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.186424] env[63345]: DEBUG nova.network.neutron [-] [instance: 49cf9c08-4024-40aa-9370-7b4f8d89e2cf] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 955.213630] env[63345]: DEBUG oslo_vmware.api [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017533, 'name': ReconfigVM_Task, 'duration_secs': 0.313826} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.214042] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Reconfigured VM instance instance-00000058 to attach disk [datastore2] 0fe61754-458c-4c5c-bb2d-2677302e5fb9/0fe61754-458c-4c5c-bb2d-2677302e5fb9.vmdk or device None with type thin {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 955.214409] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Updating instance '0fe61754-458c-4c5c-bb2d-2677302e5fb9' progress to 50 {{(pid=63345) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 955.222021] env[63345]: DEBUG nova.compute.manager [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 732ac30c-15c1-4c57-bb70-ea3ed51f646b] Starting instance... 
{{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 955.331352] env[63345]: DEBUG oslo_vmware.api [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017531, 'name': PowerOnVM_Task, 'duration_secs': 0.809659} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.331657] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: a0eb9dae-0d27-419f-9210-eaa445e564c8] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 955.331911] env[63345]: INFO nova.compute.manager [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: a0eb9dae-0d27-419f-9210-eaa445e564c8] Took 12.92 seconds to spawn the instance on the hypervisor. [ 955.332211] env[63345]: DEBUG nova.compute.manager [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: a0eb9dae-0d27-419f-9210-eaa445e564c8] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 955.333083] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2235e4ab-51ba-4ba5-b086-34b69c22f2fb {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.406400] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-948903e1-20d5-4f6a-a73b-c190484657b8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.420299] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbe0e52f-46a7-4799-9a7c-d414e5af2522 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.457134] env[63345]: DEBUG nova.compute.manager [req-0733a161-1173-44bd-960f-9e52e0f60e5f req-9bccab0e-fbf8-4e15-9be6-d8abfbc6f03a service nova] [instance: 49cf9c08-4024-40aa-9370-7b4f8d89e2cf] Detach interface failed, port_id=065f6e50-8edf-4eac-a2e3-d944aa6f33ba, reason: Instance 49cf9c08-4024-40aa-9370-7b4f8d89e2cf could not be found. {{(pid=63345) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 955.495560] env[63345]: DEBUG oslo_vmware.api [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017532, 'name': CopyVirtualDisk_Task} progress is 29%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.515491] env[63345]: DEBUG oslo_concurrency.lockutils [None req-66582d54-53eb-4fcf-bbbe-9e58bb4cf0e8 tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Lock "b5173471-3367-42ba-b450-62ad8573f048" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.736s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 955.689760] env[63345]: INFO nova.compute.manager [-] [instance: 49cf9c08-4024-40aa-9370-7b4f8d89e2cf] Took 1.36 seconds to deallocate network for instance. [ 955.724251] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecac9a50-08a2-4dd3-b199-e38d00572a8c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.735285] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fbe3f06-f884-4193-8de7-7abeff8437a4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.763639] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ff579e4-20c5-4ea6-bc35-075d2cca2197 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.785470] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-790003bc-ca45-4166-9eb1-5f94378f6dbe {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.789838] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Updating instance '0fe61754-458c-4c5c-bb2d-2677302e5fb9' progress to 67 {{(pid=63345) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 955.794474] env[63345]: DEBUG oslo_concurrency.lockutils [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 955.824517] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e1c1221-57e5-48e4-907c-63a2628f2021 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.833278] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cf8e0b5-b668-45b3-8224-2be546037fe1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.855909] env[63345]: DEBUG nova.compute.provider_tree [None req-7f77341b-1128-4d01-9b14-16da3c64545c tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 955.860109] env[63345]: INFO nova.compute.manager [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: a0eb9dae-0d27-419f-9210-eaa445e564c8] Took 36.44 seconds to build instance. [ 955.997291] env[63345]: DEBUG oslo_vmware.api [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017532, 'name': CopyVirtualDisk_Task} progress is 52%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.097850] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-a5bc10cf-48f8-4606-9293-5c17b32da89a tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Volume attach. Driver type: vmdk {{(pid=63345) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 956.098139] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-a5bc10cf-48f8-4606-9293-5c17b32da89a tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-226116', 'volume_id': 'f3eb7f29-d3fd-4c7d-ab93-1582eb175324', 'name': 'volume-f3eb7f29-d3fd-4c7d-ab93-1582eb175324', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '4868a0a0-ca35-44b0-a90c-124aa366af76', 'attached_at': '', 'detached_at': '', 'volume_id': 'f3eb7f29-d3fd-4c7d-ab93-1582eb175324', 'serial': 'f3eb7f29-d3fd-4c7d-ab93-1582eb175324'} {{(pid=63345) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 956.099062] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ccad4b5-b5c9-4028-8f00-4fb94e1058a7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.120019] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-533bd848-b157-40c0-8230-a9f36c33cdb8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.147335] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-a5bc10cf-48f8-4606-9293-5c17b32da89a tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Reconfiguring VM instance instance-00000056 to attach disk [datastore1] volume-f3eb7f29-d3fd-4c7d-ab93-1582eb175324/volume-f3eb7f29-d3fd-4c7d-ab93-1582eb175324.vmdk or device None with type thin {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 956.148110] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-26756772-2ff0-42d2-aae9-acdf61a4cfde {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.170861] env[63345]: DEBUG oslo_vmware.api [None req-a5bc10cf-48f8-4606-9293-5c17b32da89a tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 956.170861] env[63345]: value 
= "task-1017534" [ 956.170861] env[63345]: _type = "Task" [ 956.170861] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.182695] env[63345]: DEBUG oslo_vmware.api [None req-a5bc10cf-48f8-4606-9293-5c17b32da89a tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017534, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.202044] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a1f2b856-da93-440b-8c8e-b899d0f507c7 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 956.255731] env[63345]: DEBUG nova.network.neutron [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] Successfully updated port: eaf797ae-2e07-4553-aaab-deed7e3f45a1 {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 956.320169] env[63345]: DEBUG nova.compute.manager [req-ebc2aa34-367b-4ad6-b288-cb21d1307c99 req-0fe3c7e8-1352-488b-9e78-52f23ac723ee service nova] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] Received event network-vif-plugged-eaf797ae-2e07-4553-aaab-deed7e3f45a1 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 956.320169] env[63345]: DEBUG oslo_concurrency.lockutils [req-ebc2aa34-367b-4ad6-b288-cb21d1307c99 req-0fe3c7e8-1352-488b-9e78-52f23ac723ee service nova] Acquiring lock "b3f20003-f75d-4d9f-bb4a-02d2930054a8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 956.321047] env[63345]: DEBUG oslo_concurrency.lockutils [req-ebc2aa34-367b-4ad6-b288-cb21d1307c99 req-0fe3c7e8-1352-488b-9e78-52f23ac723ee service nova] Lock "b3f20003-f75d-4d9f-bb4a-02d2930054a8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 956.321596] env[63345]: DEBUG oslo_concurrency.lockutils [req-ebc2aa34-367b-4ad6-b288-cb21d1307c99 req-0fe3c7e8-1352-488b-9e78-52f23ac723ee service nova] Lock "b3f20003-f75d-4d9f-bb4a-02d2930054a8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 956.322202] env[63345]: DEBUG nova.compute.manager [req-ebc2aa34-367b-4ad6-b288-cb21d1307c99 req-0fe3c7e8-1352-488b-9e78-52f23ac723ee service nova] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] No waiting events found dispatching network-vif-plugged-eaf797ae-2e07-4553-aaab-deed7e3f45a1 {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 956.322541] env[63345]: WARNING nova.compute.manager [req-ebc2aa34-367b-4ad6-b288-cb21d1307c99 req-0fe3c7e8-1352-488b-9e78-52f23ac723ee service nova] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] Received unexpected event 
network-vif-plugged-eaf797ae-2e07-4553-aaab-deed7e3f45a1 for instance with vm_state building and task_state spawning. [ 956.339186] env[63345]: DEBUG nova.network.neutron [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Port 0cc6f455-5ad2-4802-a0ff-42268fe50023 binding to destination host cpu-1 is already ACTIVE {{(pid=63345) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3171}} [ 956.368014] env[63345]: DEBUG nova.scheduler.client.report [None req-7f77341b-1128-4d01-9b14-16da3c64545c tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 956.370487] env[63345]: DEBUG oslo_concurrency.lockutils [None req-296ec0e5-ca92-4761-b5ae-b004c0833030 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Lock "a0eb9dae-0d27-419f-9210-eaa445e564c8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.953s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 956.499147] env[63345]: DEBUG oslo_vmware.api [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017532, 'name': CopyVirtualDisk_Task} progress is 71%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.684723] env[63345]: DEBUG oslo_vmware.api [None req-a5bc10cf-48f8-4606-9293-5c17b32da89a tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017534, 'name': ReconfigVM_Task} progress is 14%. 
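Editor's note: the entries above show the oslo.vmware task-handling pattern used throughout this log: wait_for_task registers interest in a task such as task-1017534, and _poll_task reports incremental progress ("progress is 14%", "52%", ...) until the task finishes and a duration_secs is logged. Purely as an illustration of that poll-until-done loop, here is a minimal sketch; it does not use the real oslo.vmware API, and both get_task_info and poll_interval are assumed names for this example.

import time

def wait_for_task(get_task_info, poll_interval=0.5):
    """Poll a task until it reaches a terminal state.

    get_task_info is a caller-supplied callable (an assumption for this
    sketch) returning e.g. {'state': 'running', 'progress': 52} or
    {'state': 'success', 'duration_secs': 0.905}.
    """
    while True:
        info = get_task_info()
        state = info.get('state')
        if state == 'running':
            # Mirrors the "progress is N%" lines emitted by _poll_task.
            print("progress is %s%%" % info.get('progress', 0))
            time.sleep(poll_interval)
        elif state == 'success':
            # Mirrors "... completed successfully" with duration_secs.
            return info
        else:
            raise RuntimeError("task failed: %r" % (info,))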
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.757912] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Acquiring lock "refresh_cache-b3f20003-f75d-4d9f-bb4a-02d2930054a8" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 956.758223] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Acquired lock "refresh_cache-b3f20003-f75d-4d9f-bb4a-02d2930054a8" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 956.758385] env[63345]: DEBUG nova.network.neutron [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 956.782324] env[63345]: DEBUG oslo_concurrency.lockutils [None req-95865fd4-1899-48de-879c-1b4d5498289a tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Acquiring lock "3a85df04-3997-48a3-8992-f24fe997b3cc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 956.782799] env[63345]: DEBUG oslo_concurrency.lockutils [None req-95865fd4-1899-48de-879c-1b4d5498289a tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Lock "3a85df04-3997-48a3-8992-f24fe997b3cc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 956.783046] env[63345]: DEBUG oslo_concurrency.lockutils [None req-95865fd4-1899-48de-879c-1b4d5498289a tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Acquiring lock "3a85df04-3997-48a3-8992-f24fe997b3cc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 956.783291] env[63345]: DEBUG oslo_concurrency.lockutils [None req-95865fd4-1899-48de-879c-1b4d5498289a tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Lock "3a85df04-3997-48a3-8992-f24fe997b3cc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 956.783509] env[63345]: DEBUG oslo_concurrency.lockutils [None req-95865fd4-1899-48de-879c-1b4d5498289a tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Lock "3a85df04-3997-48a3-8992-f24fe997b3cc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 956.790608] env[63345]: INFO 
nova.compute.manager [None req-95865fd4-1899-48de-879c-1b4d5498289a tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Terminating instance [ 956.874306] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7f77341b-1128-4d01-9b14-16da3c64545c tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.901s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 956.875476] env[63345]: DEBUG oslo_concurrency.lockutils [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.081s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 956.878532] env[63345]: INFO nova.compute.claims [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 732ac30c-15c1-4c57-bb70-ea3ed51f646b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 956.905648] env[63345]: INFO nova.scheduler.client.report [None req-7f77341b-1128-4d01-9b14-16da3c64545c tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Deleted allocations for instance 0da64b45-fa00-4fe8-8d1d-df586f27743f [ 956.942811] env[63345]: DEBUG nova.compute.manager [req-1aa1f4ab-18af-4ab9-91c2-299afc22433a req-ae1605e3-6005-42de-9f40-cae06eba8bcd service nova] [instance: a0eb9dae-0d27-419f-9210-eaa445e564c8] Received event network-changed-bbb77ecb-8d67-4f41-9505-2571225c8480 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 956.943094] env[63345]: DEBUG nova.compute.manager [req-1aa1f4ab-18af-4ab9-91c2-299afc22433a req-ae1605e3-6005-42de-9f40-cae06eba8bcd service nova] [instance: a0eb9dae-0d27-419f-9210-eaa445e564c8] Refreshing instance network info cache due to event network-changed-bbb77ecb-8d67-4f41-9505-2571225c8480. 
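Editor's note: the "Acquiring lock ... by ...", "acquired ... waited 0.000s" and "released ... held 0.001s" lines above come from oslo.concurrency's lockutils, which Nova uses to serialize work such as per-instance event handling and resource-tracker updates. A minimal usage sketch follows, assuming the default in-process (non-external) lock; the lock name "compute_resources" is taken from the log, the function body is illustrative only.

from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def update_usage():
    # Illustrative body: in Nova this is the resource tracker mutating
    # its in-memory usage under the named lock seen in the log.
    pass

# The context-manager form produces the same acquire/release pairs:
with lockutils.lock('some-other-lock-name'):
    pass  # placeholder work done while the lock is held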
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 956.943402] env[63345]: DEBUG oslo_concurrency.lockutils [req-1aa1f4ab-18af-4ab9-91c2-299afc22433a req-ae1605e3-6005-42de-9f40-cae06eba8bcd service nova] Acquiring lock "refresh_cache-a0eb9dae-0d27-419f-9210-eaa445e564c8" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 956.943611] env[63345]: DEBUG oslo_concurrency.lockutils [req-1aa1f4ab-18af-4ab9-91c2-299afc22433a req-ae1605e3-6005-42de-9f40-cae06eba8bcd service nova] Acquired lock "refresh_cache-a0eb9dae-0d27-419f-9210-eaa445e564c8" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 956.943800] env[63345]: DEBUG nova.network.neutron [req-1aa1f4ab-18af-4ab9-91c2-299afc22433a req-ae1605e3-6005-42de-9f40-cae06eba8bcd service nova] [instance: a0eb9dae-0d27-419f-9210-eaa445e564c8] Refreshing network info cache for port bbb77ecb-8d67-4f41-9505-2571225c8480 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 956.969840] env[63345]: INFO nova.compute.manager [None req-81114f06-7e0d-46b0-997d-6d48db995641 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Resuming [ 956.970329] env[63345]: DEBUG nova.objects.instance [None req-81114f06-7e0d-46b0-997d-6d48db995641 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Lazy-loading 'flavor' on Instance uuid 070a834d-6478-4705-8df0-2a27c8780507 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 957.000334] env[63345]: DEBUG oslo_vmware.api [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017532, 'name': CopyVirtualDisk_Task} progress is 91%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.185866] env[63345]: DEBUG oslo_vmware.api [None req-a5bc10cf-48f8-4606-9293-5c17b32da89a tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017534, 'name': ReconfigVM_Task, 'duration_secs': 0.905174} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.186228] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-a5bc10cf-48f8-4606-9293-5c17b32da89a tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Reconfigured VM instance instance-00000056 to attach disk [datastore1] volume-f3eb7f29-d3fd-4c7d-ab93-1582eb175324/volume-f3eb7f29-d3fd-4c7d-ab93-1582eb175324.vmdk or device None with type thin {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 957.191795] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5872a7d8-a9b3-4615-aac8-d02201fdce57 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.209987] env[63345]: DEBUG oslo_vmware.api [None req-a5bc10cf-48f8-4606-9293-5c17b32da89a tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 957.209987] env[63345]: value = "task-1017535" [ 957.209987] env[63345]: _type = "Task" [ 957.209987] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.223584] env[63345]: DEBUG oslo_vmware.api [None req-a5bc10cf-48f8-4606-9293-5c17b32da89a tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017535, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.296139] env[63345]: DEBUG nova.compute.manager [None req-95865fd4-1899-48de-879c-1b4d5498289a tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Start destroying the instance on the hypervisor. 
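Editor's note: the "Volume attach" and "Attached VMDK" entries print the connection_info that volumeops consumes: driver_volume_type 'vmdk' plus a data dict carrying the backing volume reference, volume_id/name, access_mode and encryption flags. As a reading aid, the snippet below rebuilds that structure from the values shown in the log for instance 4868a0a0-... and pulls out the fields the attach path uses; the helper name is hypothetical.

# Shape of the connection_info logged by _attach_volume_vmdk
# (values copied verbatim from the log entry above).
connection_info = {
    'driver_volume_type': 'vmdk',
    'data': {
        'volume': 'vm-226116',   # managed object reference of the volume backing
        'volume_id': 'f3eb7f29-d3fd-4c7d-ab93-1582eb175324',
        'name': 'volume-f3eb7f29-d3fd-4c7d-ab93-1582eb175324',
        'profile_id': None,
        'qos_specs': None,
        'access_mode': 'rw',
        'encrypted': False,
        'cacheable': False,
    },
    'status': 'reserved',
    'serial': 'f3eb7f29-d3fd-4c7d-ab93-1582eb175324',
}

def vmdk_attach_fields(info):
    """Hypothetical helper: return (backing ref, volume name, read_only)."""
    data = info['data']
    return data['volume'], data['name'], data['access_mode'] != 'rw'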
{{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 957.296396] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-95865fd4-1899-48de-879c-1b4d5498289a tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 957.297317] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b42d8b18-ad41-41ba-8e24-b51f039ef9e5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.306232] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-95865fd4-1899-48de-879c-1b4d5498289a tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 957.306492] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4125975b-d2fc-430f-89ef-204eeba3634d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.314992] env[63345]: DEBUG oslo_vmware.api [None req-95865fd4-1899-48de-879c-1b4d5498289a tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Waiting for the task: (returnval){ [ 957.314992] env[63345]: value = "task-1017536" [ 957.314992] env[63345]: _type = "Task" [ 957.314992] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.324635] env[63345]: DEBUG oslo_vmware.api [None req-95865fd4-1899-48de-879c-1b4d5498289a tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017536, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.346265] env[63345]: DEBUG nova.network.neutron [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 957.368611] env[63345]: DEBUG oslo_concurrency.lockutils [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquiring lock "0fe61754-458c-4c5c-bb2d-2677302e5fb9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 957.368899] env[63345]: DEBUG oslo_concurrency.lockutils [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Lock "0fe61754-458c-4c5c-bb2d-2677302e5fb9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 957.369214] env[63345]: DEBUG oslo_concurrency.lockutils [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Lock "0fe61754-458c-4c5c-bb2d-2677302e5fb9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 957.418072] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7f77341b-1128-4d01-9b14-16da3c64545c tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Lock "0da64b45-fa00-4fe8-8d1d-df586f27743f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.052s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 957.503101] env[63345]: DEBUG oslo_vmware.api [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017532, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.779101} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.504576] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/487d34b5-b85c-4d35-8b15-fd7347b3dcfd/487d34b5-b85c-4d35-8b15-fd7347b3dcfd.vmdk to [datastore2] 9aa651b8-317d-4153-8c33-9df0a5d16115/9aa651b8-317d-4153-8c33-9df0a5d16115.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 957.505729] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b74243b-0734-4728-be54-c49e5cd34ccc {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.537293] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Reconfiguring VM instance instance-00000042 to attach disk [datastore2] 9aa651b8-317d-4153-8c33-9df0a5d16115/9aa651b8-317d-4153-8c33-9df0a5d16115.vmdk or device None with type streamOptimized {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 957.537990] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-243055b1-1627-47bd-a26f-f2411eff0b0c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.561207] env[63345]: DEBUG oslo_vmware.api [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Waiting for the task: (returnval){ [ 957.561207] env[63345]: value = "task-1017537" [ 957.561207] env[63345]: _type = "Task" [ 957.561207] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.570997] env[63345]: DEBUG oslo_vmware.api [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017537, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.720923] env[63345]: DEBUG oslo_vmware.api [None req-a5bc10cf-48f8-4606-9293-5c17b32da89a tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017535, 'name': ReconfigVM_Task, 'duration_secs': 0.166771} completed successfully. 
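Editor's note: the CopyVirtualDisk_Task above copies the cached base image ([datastore2] devstack-image-cache_base/<image-id>/<image-id>.vmdk) into the instance directory ([datastore2] <instance-uuid>/<instance-uuid>.vmdk) before ReconfigVM_Task attaches it. The few lines below only reconstruct those datastore paths from the IDs in the log; the functions are illustrative, not Nova's ds_util.

def cached_image_path(datastore, image_id):
    # e.g. "[datastore2] devstack-image-cache_base/487d.../487d....vmdk"
    return "[%s] devstack-image-cache_base/%s/%s.vmdk" % (datastore, image_id, image_id)

def instance_disk_path(datastore, instance_uuid):
    # e.g. "[datastore2] 9aa651b8-.../9aa651b8-....vmdk"
    return "[%s] %s/%s.vmdk" % (datastore, instance_uuid, instance_uuid)

src = cached_image_path("datastore2", "487d34b5-b85c-4d35-8b15-fd7347b3dcfd")
dst = instance_disk_path("datastore2", "9aa651b8-317d-4153-8c33-9df0a5d16115")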
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.721283] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-a5bc10cf-48f8-4606-9293-5c17b32da89a tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-226116', 'volume_id': 'f3eb7f29-d3fd-4c7d-ab93-1582eb175324', 'name': 'volume-f3eb7f29-d3fd-4c7d-ab93-1582eb175324', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '4868a0a0-ca35-44b0-a90c-124aa366af76', 'attached_at': '', 'detached_at': '', 'volume_id': 'f3eb7f29-d3fd-4c7d-ab93-1582eb175324', 'serial': 'f3eb7f29-d3fd-4c7d-ab93-1582eb175324'} {{(pid=63345) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 957.832989] env[63345]: DEBUG oslo_vmware.api [None req-95865fd4-1899-48de-879c-1b4d5498289a tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017536, 'name': PowerOffVM_Task, 'duration_secs': 0.477329} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.833320] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-95865fd4-1899-48de-879c-1b4d5498289a tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 957.833489] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-95865fd4-1899-48de-879c-1b4d5498289a tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 957.833769] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cdb7ddea-35e3-4f08-9993-2d1fbcf99a85 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.858523] env[63345]: DEBUG nova.network.neutron [req-1aa1f4ab-18af-4ab9-91c2-299afc22433a req-ae1605e3-6005-42de-9f40-cae06eba8bcd service nova] [instance: a0eb9dae-0d27-419f-9210-eaa445e564c8] Updated VIF entry in instance network info cache for port bbb77ecb-8d67-4f41-9505-2571225c8480. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 957.858523] env[63345]: DEBUG nova.network.neutron [req-1aa1f4ab-18af-4ab9-91c2-299afc22433a req-ae1605e3-6005-42de-9f40-cae06eba8bcd service nova] [instance: a0eb9dae-0d27-419f-9210-eaa445e564c8] Updating instance_info_cache with network_info: [{"id": "bbb77ecb-8d67-4f41-9505-2571225c8480", "address": "fa:16:3e:55:21:35", "network": {"id": "372a3368-2d7a-4380-b811-7ad477d85250", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-454648225-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.222", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "41afa63287424a549133615eb390bac7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b8af79a-31d5-4d78-93d7-3919aa1d9186", "external-id": "nsx-vlan-transportzone-324", "segmentation_id": 324, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbbb77ecb-8d", "ovs_interfaceid": "bbb77ecb-8d67-4f41-9505-2571225c8480", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 957.895215] env[63345]: DEBUG nova.network.neutron [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] Updating instance_info_cache with network_info: [{"id": "eaf797ae-2e07-4553-aaab-deed7e3f45a1", "address": "fa:16:3e:d3:74:6a", "network": {"id": "ea3c61b4-8a70-4a81-9b8c-1ed204df75b0", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-775530687-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68de4d93c3db4387a4d13c86cc1c77db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9ec24851-7bb6-426b-b28f-f7b246df1713", "external-id": "nsx-vlan-transportzone-359", "segmentation_id": 359, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeaf797ae-2e", "ovs_interfaceid": "eaf797ae-2e07-4553-aaab-deed7e3f45a1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 957.925213] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-95865fd4-1899-48de-879c-1b4d5498289a tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Unregistered the VM {{(pid=63345) 
_destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 957.925587] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-95865fd4-1899-48de-879c-1b4d5498289a tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 957.925899] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-95865fd4-1899-48de-879c-1b4d5498289a tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Deleting the datastore file [datastore2] 3a85df04-3997-48a3-8992-f24fe997b3cc {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 957.926506] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2aae9865-4ffe-4a42-9580-963f63bbfe73 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.934355] env[63345]: DEBUG oslo_vmware.api [None req-95865fd4-1899-48de-879c-1b4d5498289a tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Waiting for the task: (returnval){ [ 957.934355] env[63345]: value = "task-1017539" [ 957.934355] env[63345]: _type = "Task" [ 957.934355] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.946751] env[63345]: DEBUG oslo_vmware.api [None req-95865fd4-1899-48de-879c-1b4d5498289a tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017539, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.982089] env[63345]: DEBUG oslo_concurrency.lockutils [None req-81114f06-7e0d-46b0-997d-6d48db995641 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Acquiring lock "refresh_cache-070a834d-6478-4705-8df0-2a27c8780507" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 957.982089] env[63345]: DEBUG oslo_concurrency.lockutils [None req-81114f06-7e0d-46b0-997d-6d48db995641 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Acquired lock "refresh_cache-070a834d-6478-4705-8df0-2a27c8780507" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 957.982455] env[63345]: DEBUG nova.network.neutron [None req-81114f06-7e0d-46b0-997d-6d48db995641 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 958.073437] env[63345]: DEBUG oslo_vmware.api [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017537, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.140904] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-514facc7-643e-438f-b978-01688c5c8db5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.152018] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e72f5c35-3411-4a14-894e-f4eb29b73b2b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.189279] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3484c5cf-190e-4d58-a620-359e16f33de6 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.198746] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-273c2274-7efb-44a0-b7f3-1686c1f00102 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.214644] env[63345]: DEBUG nova.compute.provider_tree [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 958.360914] env[63345]: DEBUG nova.compute.manager [req-1fe536b6-adda-4b7f-b6d0-2f859ce86c83 req-c115d84c-e4d7-46f7-9687-08d1f6cafcc3 service nova] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] Received event network-changed-eaf797ae-2e07-4553-aaab-deed7e3f45a1 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 958.361191] env[63345]: DEBUG nova.compute.manager [req-1fe536b6-adda-4b7f-b6d0-2f859ce86c83 req-c115d84c-e4d7-46f7-9687-08d1f6cafcc3 service nova] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] Refreshing instance network info cache due to event network-changed-eaf797ae-2e07-4553-aaab-deed7e3f45a1. 
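Editor's note: the "Inventory has not changed" entries carry the resource-provider inventory reported to Placement. Under the usual Placement capacity formula, capacity = (total - reserved) * allocation_ratio, with max_unit capping any single allocation; the worked example below applies that formula to the figures printed in the log (48 VCPU at ratio 4.0, 196590 MB with 512 reserved, 400 GB disk). This is arithmetic on the logged data, not output from Nova itself.

inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 186},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(rc, capacity, "max per single allocation:", inv['max_unit'])
# VCPU      192.0    max per single allocation: 16
# MEMORY_MB 196078.0 max per single allocation: 65530
# DISK_GB   400.0    max per single allocation: 186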
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 958.361422] env[63345]: DEBUG oslo_concurrency.lockutils [req-1fe536b6-adda-4b7f-b6d0-2f859ce86c83 req-c115d84c-e4d7-46f7-9687-08d1f6cafcc3 service nova] Acquiring lock "refresh_cache-b3f20003-f75d-4d9f-bb4a-02d2930054a8" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 958.361905] env[63345]: DEBUG oslo_concurrency.lockutils [req-1aa1f4ab-18af-4ab9-91c2-299afc22433a req-ae1605e3-6005-42de-9f40-cae06eba8bcd service nova] Releasing lock "refresh_cache-a0eb9dae-0d27-419f-9210-eaa445e564c8" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 958.397476] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Releasing lock "refresh_cache-b3f20003-f75d-4d9f-bb4a-02d2930054a8" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 958.397817] env[63345]: DEBUG nova.compute.manager [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] Instance network_info: |[{"id": "eaf797ae-2e07-4553-aaab-deed7e3f45a1", "address": "fa:16:3e:d3:74:6a", "network": {"id": "ea3c61b4-8a70-4a81-9b8c-1ed204df75b0", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-775530687-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68de4d93c3db4387a4d13c86cc1c77db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9ec24851-7bb6-426b-b28f-f7b246df1713", "external-id": "nsx-vlan-transportzone-359", "segmentation_id": 359, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeaf797ae-2e", "ovs_interfaceid": "eaf797ae-2e07-4553-aaab-deed7e3f45a1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 958.398226] env[63345]: DEBUG oslo_concurrency.lockutils [req-1fe536b6-adda-4b7f-b6d0-2f859ce86c83 req-c115d84c-e4d7-46f7-9687-08d1f6cafcc3 service nova] Acquired lock "refresh_cache-b3f20003-f75d-4d9f-bb4a-02d2930054a8" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 958.398452] env[63345]: DEBUG nova.network.neutron [req-1fe536b6-adda-4b7f-b6d0-2f859ce86c83 req-c115d84c-e4d7-46f7-9687-08d1f6cafcc3 service nova] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] Refreshing network info cache for port eaf797ae-2e07-4553-aaab-deed7e3f45a1 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 958.400111] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 
tempest-ImagesOneServerTestJSON-390874104-project-member] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d3:74:6a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9ec24851-7bb6-426b-b28f-f7b246df1713', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'eaf797ae-2e07-4553-aaab-deed7e3f45a1', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 958.407791] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Creating folder: Project (68de4d93c3db4387a4d13c86cc1c77db). Parent ref: group-v225918. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 958.409161] env[63345]: DEBUG oslo_concurrency.lockutils [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquiring lock "refresh_cache-0fe61754-458c-4c5c-bb2d-2677302e5fb9" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 958.409379] env[63345]: DEBUG oslo_concurrency.lockutils [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquired lock "refresh_cache-0fe61754-458c-4c5c-bb2d-2677302e5fb9" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 958.409560] env[63345]: DEBUG nova.network.neutron [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 958.411915] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a1d940f4-d1bd-4f77-b30a-4cb7145bacb4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.424048] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Created folder: Project (68de4d93c3db4387a4d13c86cc1c77db) in parent group-v225918. [ 958.424048] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Creating folder: Instances. Parent ref: group-v226117. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 958.424331] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5e34f625-c8e2-47b3-a6af-da737771646a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.435723] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Created folder: Instances in parent group-v226117. 
[ 958.435980] env[63345]: DEBUG oslo.service.loopingcall [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 958.439058] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 958.439294] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-221666d2-a483-4f5d-8bed-b3316c0df1e2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.458867] env[63345]: DEBUG oslo_vmware.api [None req-95865fd4-1899-48de-879c-1b4d5498289a tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Task: {'id': task-1017539, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.34497} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.460164] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-95865fd4-1899-48de-879c-1b4d5498289a tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 958.460368] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-95865fd4-1899-48de-879c-1b4d5498289a tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 958.460556] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-95865fd4-1899-48de-879c-1b4d5498289a tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 958.460736] env[63345]: INFO nova.compute.manager [None req-95865fd4-1899-48de-879c-1b4d5498289a tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Took 1.16 seconds to destroy the instance on the hypervisor. [ 958.460971] env[63345]: DEBUG oslo.service.loopingcall [None req-95865fd4-1899-48de-879c-1b4d5498289a tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 958.461168] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 958.461168] env[63345]: value = "task-1017542" [ 958.461168] env[63345]: _type = "Task" [ 958.461168] env[63345]: } to complete. 
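Editor's note: the terminate path for instance 3a85df04-... above follows a fixed order: power off the VM (PowerOffVM_Task), unregister it from vCenter (UnregisterVM), delete its datastore directory (DeleteDatastoreFile_Task), then deallocate its Neutron ports. The outline below is only a paraphrase of that order with injected placeholder callables (power_off, unregister, delete_datastore_dir, deallocate_network are hypothetical names), not the vmops implementation.

def destroy_instance(vm, datastore_dir, ports,
                     power_off, unregister,
                     delete_datastore_dir, deallocate_network):
    """Sketch of the destroy order visible in the log; all callables injected."""
    power_off(vm)                        # PowerOffVM_Task
    unregister(vm)                       # UnregisterVM
    delete_datastore_dir(datastore_dir)  # DeleteDatastoreFile_Task on [datastore2] <uuid>
    deallocate_network(ports)            # deallocate_for_instance() in neutron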
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.461357] env[63345]: DEBUG nova.compute.manager [-] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 958.461451] env[63345]: DEBUG nova.network.neutron [-] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 958.471199] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017542, 'name': CreateVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.573061] env[63345]: DEBUG oslo_vmware.api [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017537, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.585506] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Acquiring lock "22a11cf9-8f85-4371-98eb-25b267c9aff7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 958.585742] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Lock "22a11cf9-8f85-4371-98eb-25b267c9aff7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 958.706029] env[63345]: DEBUG nova.network.neutron [None req-81114f06-7e0d-46b0-997d-6d48db995641 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Updating instance_info_cache with network_info: [{"id": "cf06de95-5747-4226-b66c-b9ccca47321d", "address": "fa:16:3e:9e:46:44", "network": {"id": "403ac06e-e45e-4215-bf0c-16ddd583ddc5", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1349318740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ac5c2a653dae436c97514507939c4e3c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e41c97-4d75-4041-ae71-321e7e9d480b", "external-id": "nsx-vlan-transportzone-483", "segmentation_id": 483, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf06de95-57", "ovs_interfaceid": "cf06de95-5747-4226-b66c-b9ccca47321d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] 
{{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 958.717270] env[63345]: DEBUG nova.scheduler.client.report [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 958.763711] env[63345]: DEBUG nova.objects.instance [None req-a5bc10cf-48f8-4606-9293-5c17b32da89a tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lazy-loading 'flavor' on Instance uuid 4868a0a0-ca35-44b0-a90c-124aa366af76 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 958.978707] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017542, 'name': CreateVM_Task} progress is 25%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.117539] env[63345]: DEBUG oslo_vmware.api [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017537, 'name': ReconfigVM_Task, 'duration_secs': 1.354068} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.117539] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Reconfigured VM instance instance-00000042 to attach disk [datastore2] 9aa651b8-317d-4153-8c33-9df0a5d16115/9aa651b8-317d-4153-8c33-9df0a5d16115.vmdk or device None with type streamOptimized {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 959.117539] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'size': 0, 'encryption_format': None, 'encrypted': False, 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'disk_bus': None, 'guest_format': None, 'device_name': '/dev/sda', 'encryption_secret_uuid': None, 'image_id': '2ff49e1b-8f44-4332-bba9-777d55ff62c4'}], 'ephemerals': [], 'block_device_mapping': [{'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-226101', 'volume_id': '78ab0d16-eec6-45ab-badd-c9109f513975', 'name': 'volume-78ab0d16-eec6-45ab-badd-c9109f513975', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '9aa651b8-317d-4153-8c33-9df0a5d16115', 'attached_at': '', 'detached_at': '', 'volume_id': '78ab0d16-eec6-45ab-badd-c9109f513975', 'serial': '78ab0d16-eec6-45ab-badd-c9109f513975'}, 
'delete_on_termination': False, 'mount_device': '/dev/sdb', 'device_type': None, 'boot_index': None, 'disk_bus': None, 'guest_format': None, 'attachment_id': '0681894b-ec62-4526-9435-2a32413950f8', 'volume_type': None}], 'swap': None} {{(pid=63345) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 959.117539] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Volume attach. Driver type: vmdk {{(pid=63345) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 959.117539] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-226101', 'volume_id': '78ab0d16-eec6-45ab-badd-c9109f513975', 'name': 'volume-78ab0d16-eec6-45ab-badd-c9109f513975', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '9aa651b8-317d-4153-8c33-9df0a5d16115', 'attached_at': '', 'detached_at': '', 'volume_id': '78ab0d16-eec6-45ab-badd-c9109f513975', 'serial': '78ab0d16-eec6-45ab-badd-c9109f513975'} {{(pid=63345) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 959.117539] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-783b2a0a-5741-4b27-8f46-03c8d8a2c6bd {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.117539] env[63345]: DEBUG nova.compute.manager [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Starting instance... 
{{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 959.119223] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e209a48e-af8f-4f2a-ae05-f5a4d02bdd6b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.146145] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Reconfiguring VM instance instance-00000042 to attach disk [datastore2] volume-78ab0d16-eec6-45ab-badd-c9109f513975/volume-78ab0d16-eec6-45ab-badd-c9109f513975.vmdk or device None with type thin {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 959.151815] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9abda5dd-4090-4eea-89ca-4912f6a01ca3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.170100] env[63345]: DEBUG oslo_vmware.api [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Waiting for the task: (returnval){ [ 959.170100] env[63345]: value = "task-1017543" [ 959.170100] env[63345]: _type = "Task" [ 959.170100] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.178010] env[63345]: DEBUG oslo_vmware.api [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017543, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.200833] env[63345]: DEBUG nova.network.neutron [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Updating instance_info_cache with network_info: [{"id": "0cc6f455-5ad2-4802-a0ff-42268fe50023", "address": "fa:16:3e:61:01:ef", "network": {"id": "80bb8388-e130-46af-a4fc-1daea51d1bf5", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1343573007-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "964cee117b3c4601b3afe82a8bb9c23e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ddfb706a-add1-4e16-9ac4-d20b16a1df6d", "external-id": "nsx-vlan-transportzone-820", "segmentation_id": 820, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0cc6f455-5a", "ovs_interfaceid": "0cc6f455-5ad2-4802-a0ff-42268fe50023", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 959.212567] env[63345]: DEBUG oslo_concurrency.lockutils [None req-81114f06-7e0d-46b0-997d-6d48db995641 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Releasing lock "refresh_cache-070a834d-6478-4705-8df0-2a27c8780507" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 959.212567] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59f0f47c-d801-4c36-b71c-d56852effda5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.217782] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-81114f06-7e0d-46b0-997d-6d48db995641 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Resuming the VM {{(pid=63345) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1184}} [ 959.218305] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-047a6bda-df12-4096-98eb-c03d03ae0676 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.222052] env[63345]: DEBUG oslo_concurrency.lockutils [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.347s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 959.222582] env[63345]: DEBUG nova.compute.manager [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 
tempest-DeleteServersTestJSON-208475450-project-member] [instance: 732ac30c-15c1-4c57-bb70-ea3ed51f646b] Start building networks asynchronously for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 959.226148] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a1f2b856-da93-440b-8c8e-b899d0f507c7 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.024s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 959.226431] env[63345]: DEBUG nova.objects.instance [None req-a1f2b856-da93-440b-8c8e-b899d0f507c7 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Lazy-loading 'resources' on Instance uuid 49cf9c08-4024-40aa-9370-7b4f8d89e2cf {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 959.227511] env[63345]: DEBUG oslo_vmware.api [None req-81114f06-7e0d-46b0-997d-6d48db995641 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Waiting for the task: (returnval){ [ 959.227511] env[63345]: value = "task-1017544" [ 959.227511] env[63345]: _type = "Task" [ 959.227511] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.238695] env[63345]: DEBUG oslo_vmware.api [None req-81114f06-7e0d-46b0-997d-6d48db995641 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1017544, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.268620] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a5bc10cf-48f8-4606-9293-5c17b32da89a tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lock "4868a0a0-ca35-44b0-a90c-124aa366af76" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.376s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 959.363176] env[63345]: DEBUG nova.network.neutron [-] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 959.451221] env[63345]: DEBUG nova.network.neutron [req-1fe536b6-adda-4b7f-b6d0-2f859ce86c83 req-c115d84c-e4d7-46f7-9687-08d1f6cafcc3 service nova] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] Updated VIF entry in instance network info cache for port eaf797ae-2e07-4553-aaab-deed7e3f45a1. 
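The 'Lock "compute_resources" acquired/released' records come from oslo.concurrency's lockutils, which also reports how long each caller waited for and held the lock. A minimal sketch of that locking pattern (the lock name is taken from the log; the guarded functions are illustrative, not Nova's code):

from oslo_concurrency import lockutils

# Decorator form: serializes all callers in this process that share the lock name
# (external=True would add a file-based inter-process lock on top).
@lockutils.synchronized("compute_resources")
def claim_resources(tracker, instance):
    # resource accounting would happen here
    return True

# Context-manager form, matching the "Acquiring lock ... / Lock ... released" pairs above.
def update_usage(tracker, instance):
    with lockutils.lock("compute_resources"):
        pass  # guarded section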
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 959.451607] env[63345]: DEBUG nova.network.neutron [req-1fe536b6-adda-4b7f-b6d0-2f859ce86c83 req-c115d84c-e4d7-46f7-9687-08d1f6cafcc3 service nova] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] Updating instance_info_cache with network_info: [{"id": "eaf797ae-2e07-4553-aaab-deed7e3f45a1", "address": "fa:16:3e:d3:74:6a", "network": {"id": "ea3c61b4-8a70-4a81-9b8c-1ed204df75b0", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-775530687-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68de4d93c3db4387a4d13c86cc1c77db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9ec24851-7bb6-426b-b28f-f7b246df1713", "external-id": "nsx-vlan-transportzone-359", "segmentation_id": 359, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeaf797ae-2e", "ovs_interfaceid": "eaf797ae-2e07-4553-aaab-deed7e3f45a1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 959.479017] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017542, 'name': CreateVM_Task} progress is 25%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.615478] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 959.683346] env[63345]: DEBUG oslo_vmware.api [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017543, 'name': ReconfigVM_Task, 'duration_secs': 0.426588} completed successfully. 
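The instance_info_cache payloads above are JSON-serializable lists of VIF dicts. A small sketch, assuming the structure shown in these records (id, address, network.subnets[].ips[]), that pulls the port ID, MAC and fixed IPs out of one cached entry; purely illustrative:

def summarize_network_info(network_info):
    # Reduce a cached network_info list to (port, mac, fixed IPs) tuples.
    summary = []
    for vif in network_info:
        ips = [
            ip["address"]
            for subnet in vif["network"]["subnets"]
            for ip in subnet["ips"]
        ]
        summary.append({"port_id": vif["id"], "mac": vif["address"], "fixed_ips": ips})
    return summary

# For the entry logged above for port eaf797ae-2e07-4553-aaab-deed7e3f45a1 this yields
# {"port_id": "eaf797ae-...", "mac": "fa:16:3e:d3:74:6a", "fixed_ips": ["192.168.128.14"]}.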
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.683673] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Reconfigured VM instance instance-00000042 to attach disk [datastore2] volume-78ab0d16-eec6-45ab-badd-c9109f513975/volume-78ab0d16-eec6-45ab-badd-c9109f513975.vmdk or device None with type thin {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 959.691421] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-43361067-c873-4069-af80-4605026f10a2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.706590] env[63345]: DEBUG oslo_concurrency.lockutils [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Releasing lock "refresh_cache-0fe61754-458c-4c5c-bb2d-2677302e5fb9" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 959.722918] env[63345]: DEBUG oslo_vmware.api [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Waiting for the task: (returnval){ [ 959.722918] env[63345]: value = "task-1017545" [ 959.722918] env[63345]: _type = "Task" [ 959.722918] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.731704] env[63345]: DEBUG nova.compute.utils [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 959.736632] env[63345]: DEBUG oslo_vmware.api [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017545, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.737330] env[63345]: DEBUG nova.compute.manager [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 732ac30c-15c1-4c57-bb70-ea3ed51f646b] Allocating IP information in the background. {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 959.737545] env[63345]: DEBUG nova.network.neutron [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 732ac30c-15c1-4c57-bb70-ea3ed51f646b] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 959.751895] env[63345]: DEBUG oslo_vmware.api [None req-81114f06-7e0d-46b0-997d-6d48db995641 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1017544, 'name': PowerOnVM_Task} progress is 66%. 
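The "Using /dev/sd instead of None" record from get_next_device_name means no device prefix was requested for the new block device, so the /dev/sd prefix is used before picking the next free letter. A simplified stand-alone sketch of that idea (not Nova's actual implementation, which also validates flavors and existing mappings):

import string

def next_device_name(used, prefix="/dev/sd"):
    """Return the first unused /dev/sdX name, e.g. /dev/sdb if sda is taken."""
    taken = {name[-1] for name in used if name.startswith(prefix)}
    for letter in string.ascii_lowercase:
        if letter not in taken:
            return prefix + letter
    raise ValueError("no free device names left")

# next_device_name(["/dev/sda"]) -> "/dev/sdb"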
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.844247] env[63345]: DEBUG nova.policy [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fb6730bb6292421e8f943bce2e912bef', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c535ae9067ab4e8a87e95c68af4624fb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 959.869419] env[63345]: INFO nova.compute.manager [-] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Took 1.41 seconds to deallocate network for instance. [ 959.954356] env[63345]: DEBUG oslo_concurrency.lockutils [req-1fe536b6-adda-4b7f-b6d0-2f859ce86c83 req-c115d84c-e4d7-46f7-9687-08d1f6cafcc3 service nova] Releasing lock "refresh_cache-b3f20003-f75d-4d9f-bb4a-02d2930054a8" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 959.971922] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9a9d893-89ab-4e5a-ab8c-dccb69caed1b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.979666] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017542, 'name': CreateVM_Task, 'duration_secs': 1.252386} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.981501] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 959.982338] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 959.982566] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 959.982863] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 959.984466] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a9c45d8-8687-495b-bcdb-bd81bf0afbe2 {{(pid=63345) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.988168] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0673bd1a-5a2e-4a6b-98d6-1c2aa3cccc99 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.023132] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6530eef-ef43-4c1b-92fb-70dd7672e5ad {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.026317] env[63345]: DEBUG oslo_vmware.api [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Waiting for the task: (returnval){ [ 960.026317] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52171922-6ffd-3942-60e9-33d8099236f0" [ 960.026317] env[63345]: _type = "Task" [ 960.026317] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.037477] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6bed5f9-2e82-4a17-9747-ee8f79cc7e5a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.041994] env[63345]: DEBUG oslo_vmware.api [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52171922-6ffd-3942-60e9-33d8099236f0, 'name': SearchDatastore_Task, 'duration_secs': 0.020913} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.042782] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 960.043066] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 960.043405] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 960.043567] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 960.043757] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 960.044744] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a5c06c9e-4278-45e6-874f-0bc1d8c4c000 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.055737] env[63345]: DEBUG nova.compute.provider_tree [None req-a1f2b856-da93-440b-8c8e-b899d0f507c7 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 960.065928] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 960.066165] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 960.066896] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-00d69c91-6962-447a-be28-f32a6f80db52 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.072300] env[63345]: DEBUG oslo_vmware.api [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Waiting for the task: (returnval){ [ 960.072300] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]529fd2c4-5fbe-e933-4dbc-57079f4c6428" [ 960.072300] env[63345]: _type = "Task" [ 960.072300] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.080744] env[63345]: DEBUG oslo_vmware.api [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]529fd2c4-5fbe-e933-4dbc-57079f4c6428, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.095718] env[63345]: INFO nova.compute.manager [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Rebuilding instance [ 960.183151] env[63345]: DEBUG nova.compute.manager [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 960.184118] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ee81345-f23e-43cf-84a6-a7b6f89b8769 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.237424] env[63345]: DEBUG nova.compute.manager [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 732ac30c-15c1-4c57-bb70-ea3ed51f646b] Start building block device mappings for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 960.237683] env[63345]: DEBUG oslo_vmware.api [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017545, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.250961] env[63345]: DEBUG oslo_vmware.api [None req-81114f06-7e0d-46b0-997d-6d48db995641 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1017544, 'name': PowerOnVM_Task, 'duration_secs': 0.567045} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.250961] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-81114f06-7e0d-46b0-997d-6d48db995641 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Resumed the VM {{(pid=63345) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1189}} [ 960.250961] env[63345]: DEBUG nova.compute.manager [None req-81114f06-7e0d-46b0-997d-6d48db995641 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 960.250961] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb20ecb0-8ca7-432f-b807-daa30c7d49ea {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.257353] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0b0b045-19e8-4eb9-8ace-28b97fca03f2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.282178] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e02f70cf-de4c-4929-9a36-d92efc6bb238 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.288747] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Updating instance '0fe61754-458c-4c5c-bb2d-2677302e5fb9' progress to 83 {{(pid=63345) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 960.377186] env[63345]: DEBUG oslo_concurrency.lockutils [None req-95865fd4-1899-48de-879c-1b4d5498289a tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 960.420537] env[63345]: DEBUG nova.network.neutron [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 732ac30c-15c1-4c57-bb70-ea3ed51f646b] Successfully created port: 4b958d52-f058-41bc-a29e-1c8a2749ac18 {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 960.478180] env[63345]: DEBUG nova.compute.manager [req-8219ce5f-6600-4616-a99a-c45c95115a28 req-5e8f7510-19ca-4f6b-bdb5-82400e28baa5 service nova] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Received event network-vif-deleted-a44ad561-3547-45fd-a941-c72ff5211989 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 960.562503] env[63345]: DEBUG nova.scheduler.client.report [None req-a1f2b856-da93-440b-8c8e-b899d0f507c7 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 
'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 960.583628] env[63345]: DEBUG oslo_vmware.api [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]529fd2c4-5fbe-e933-4dbc-57079f4c6428, 'name': SearchDatastore_Task, 'duration_secs': 0.009279} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.584828] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a0adb0d4-9468-41bd-a754-659801cf5d54 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.591113] env[63345]: DEBUG oslo_vmware.api [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Waiting for the task: (returnval){ [ 960.591113] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]527c5ac1-545f-0ec6-ab65-76ee1dd6bf26" [ 960.591113] env[63345]: _type = "Task" [ 960.591113] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.598938] env[63345]: DEBUG oslo_vmware.api [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]527c5ac1-545f-0ec6-ab65-76ee1dd6bf26, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.731830] env[63345]: DEBUG oslo_vmware.api [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017545, 'name': ReconfigVM_Task, 'duration_secs': 0.543673} completed successfully. 
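The inventory record above is what placement sees for this compute node; the schedulable capacity per resource class follows directly from it as (total - reserved) * allocation_ratio. Worked out with the logged numbers as a plain-arithmetic sketch:

# Effective capacity from the reported inventory (values copied from the record above).
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(rc, capacity)
# VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0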
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.732519] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-226101', 'volume_id': '78ab0d16-eec6-45ab-badd-c9109f513975', 'name': 'volume-78ab0d16-eec6-45ab-badd-c9109f513975', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '9aa651b8-317d-4153-8c33-9df0a5d16115', 'attached_at': '', 'detached_at': '', 'volume_id': '78ab0d16-eec6-45ab-badd-c9109f513975', 'serial': '78ab0d16-eec6-45ab-badd-c9109f513975'} {{(pid=63345) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 960.733252] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5dc84599-fa19-42f4-a57d-5e7e0a935d1a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.739958] env[63345]: DEBUG oslo_vmware.api [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Waiting for the task: (returnval){ [ 960.739958] env[63345]: value = "task-1017546" [ 960.739958] env[63345]: _type = "Task" [ 960.739958] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.752095] env[63345]: DEBUG oslo_vmware.api [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017546, 'name': Rename_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.802945] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 960.802945] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5dc296a7-f068-4074-a67b-b64f21ea03c6 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.811071] env[63345]: DEBUG oslo_vmware.api [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Waiting for the task: (returnval){ [ 960.811071] env[63345]: value = "task-1017547" [ 960.811071] env[63345]: _type = "Task" [ 960.811071] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.822065] env[63345]: DEBUG oslo_vmware.api [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017547, 'name': PowerOnVM_Task} progress is 0%. 
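The "Attached VMDK" record above dumps the Cinder connection_info the driver just consumed. A tiny sketch, assuming a dict shaped exactly like the one logged, that extracts the fields of interest (volume UUID, the shadow VM holding the vmdk, the vmdk name, and access mode); the helper name is hypothetical:

def vmdk_attach_summary(connection_info):
    data = connection_info["data"]
    return {
        "volume_id": data["volume_id"],      # Cinder volume UUID
        "backing_vm": data["volume"],        # shadow VM holding the vmdk, e.g. 'vm-226101'
        "vmdk_name": data["name"],           # 'volume-<uuid>'
        "read_only": data["access_mode"] != "rw",
    }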
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.957185] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquiring lock "dd624e54-bd5b-4660-88a1-9d6f36560421" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 960.958109] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Lock "dd624e54-bd5b-4660-88a1-9d6f36560421" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 961.064742] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a1f2b856-da93-440b-8c8e-b899d0f507c7 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.838s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 961.069480] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.452s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 961.071603] env[63345]: INFO nova.compute.claims [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 961.096434] env[63345]: INFO nova.scheduler.client.report [None req-a1f2b856-da93-440b-8c8e-b899d0f507c7 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Deleted allocations for instance 49cf9c08-4024-40aa-9370-7b4f8d89e2cf [ 961.105603] env[63345]: DEBUG oslo_vmware.api [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]527c5ac1-545f-0ec6-ab65-76ee1dd6bf26, 'name': SearchDatastore_Task, 'duration_secs': 0.049149} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.105905] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 961.106255] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore1] b3f20003-f75d-4d9f-bb4a-02d2930054a8/b3f20003-f75d-4d9f-bb4a-02d2930054a8.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 961.106522] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0fd1a2b2-ed71-4b7c-a1ba-27e67db76e3e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.114227] env[63345]: DEBUG oslo_vmware.api [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Waiting for the task: (returnval){ [ 961.114227] env[63345]: value = "task-1017548" [ 961.114227] env[63345]: _type = "Task" [ 961.114227] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.122808] env[63345]: DEBUG oslo_vmware.api [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Task: {'id': task-1017548, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.200912] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 961.201275] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-91563a96-aff8-4ec1-b7f7-1a6fee731736 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.208516] env[63345]: DEBUG oslo_vmware.api [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 961.208516] env[63345]: value = "task-1017549" [ 961.208516] env[63345]: _type = "Task" [ 961.208516] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.219719] env[63345]: DEBUG oslo_vmware.api [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017549, 'name': PowerOffVM_Task} progress is 0%. 
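The CopyVirtualDisk_Task above copies the cached image vmdk into the instance folder, and the datastore paths in these records follow a fixed layout ("[datastore] folder/file.vmdk"). A small helper, hypothetical and only meant to make that layout explicit, that builds both path strings:

def cached_image_vmdk(datastore, image_id, cache_folder="devstack-image-cache_base"):
    # e.g. '[datastore1] devstack-image-cache_base/<image-id>/<image-id>.vmdk'
    return "[%s] %s/%s/%s.vmdk" % (datastore, cache_folder, image_id, image_id)

def instance_vmdk(datastore, instance_uuid):
    # e.g. '[datastore1] <instance-uuid>/<instance-uuid>.vmdk'
    return "[%s] %s/%s.vmdk" % (datastore, instance_uuid, instance_uuid)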
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.250766] env[63345]: DEBUG nova.compute.manager [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 732ac30c-15c1-4c57-bb70-ea3ed51f646b] Start spawning the instance on the hypervisor. {{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 961.253252] env[63345]: DEBUG oslo_vmware.api [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017546, 'name': Rename_Task, 'duration_secs': 0.344112} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.253732] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 961.253991] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4e6f13ac-dee1-45c9-9bb1-bba7892f830e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.260265] env[63345]: DEBUG oslo_vmware.api [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Waiting for the task: (returnval){ [ 961.260265] env[63345]: value = "task-1017550" [ 961.260265] env[63345]: _type = "Task" [ 961.260265] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.273109] env[63345]: DEBUG oslo_vmware.api [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017550, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.282122] env[63345]: DEBUG nova.virt.hardware [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 961.282395] env[63345]: DEBUG nova.virt.hardware [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 961.282572] env[63345]: DEBUG nova.virt.hardware [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 961.282759] env[63345]: DEBUG nova.virt.hardware [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 961.282983] env[63345]: DEBUG nova.virt.hardware [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 961.283124] env[63345]: DEBUG nova.virt.hardware [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 961.283349] env[63345]: DEBUG nova.virt.hardware [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 961.283554] env[63345]: DEBUG nova.virt.hardware [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 961.283743] env[63345]: DEBUG nova.virt.hardware [None 
req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 961.283922] env[63345]: DEBUG nova.virt.hardware [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 961.284129] env[63345]: DEBUG nova.virt.hardware [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 961.285221] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faaa5d3a-665e-4526-9133-c0213711ec40 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.294739] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-584f63b7-a140-44a2-96b1-2bc0696d1f50 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.320788] env[63345]: DEBUG oslo_vmware.api [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017547, 'name': PowerOnVM_Task, 'duration_secs': 0.398113} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.321143] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 961.321362] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-59d24ef3-a2cd-44e1-8e65-fdab3875baf1 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Updating instance '0fe61754-458c-4c5c-bb2d-2677302e5fb9' progress to 100 {{(pid=63345) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 961.462814] env[63345]: DEBUG nova.compute.manager [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Starting instance... 
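The nova.virt.hardware lines above enumerate the (sockets, cores, threads) combinations that can express the flavor's vCPU count within the flavor and image limits; with 1 vCPU and effectively unlimited limits the only candidate is 1:1:1. A simplified, stand-alone sketch of that enumeration (not Nova's actual algorithm, which also applies preferences and NUMA constraints):

from collections import namedtuple

Topology = namedtuple("Topology", "sockets cores threads")

def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
    """Every (sockets, cores, threads) whose product equals the vCPU count."""
    found = []
    for s in range(1, min(vcpus, max_sockets) + 1):
        for c in range(1, min(vcpus, max_cores) + 1):
            for t in range(1, min(vcpus, max_threads) + 1):
                if s * c * t == vcpus:
                    found.append(Topology(s, c, t))
    return found

# possible_topologies(1, 65536, 65536, 65536) -> [Topology(sockets=1, cores=1, threads=1)]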
{{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 961.606041] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a1f2b856-da93-440b-8c8e-b899d0f507c7 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Lock "49cf9c08-4024-40aa-9370-7b4f8d89e2cf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.938s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 961.627328] env[63345]: DEBUG oslo_vmware.api [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Task: {'id': task-1017548, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.722901] env[63345]: DEBUG oslo_vmware.api [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017549, 'name': PowerOffVM_Task, 'duration_secs': 0.249006} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.722901] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 961.769085] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7f7d0aef-0d1c-4b09-a138-f48e4d7b60ea tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquiring lock "c84c8b9a-9164-4dd7-b094-dd09c15c6f21" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 961.769473] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7f7d0aef-0d1c-4b09-a138-f48e4d7b60ea tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Lock "c84c8b9a-9164-4dd7-b094-dd09c15c6f21" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 961.769785] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7f7d0aef-0d1c-4b09-a138-f48e4d7b60ea tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquiring lock "c84c8b9a-9164-4dd7-b094-dd09c15c6f21-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 961.770061] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7f7d0aef-0d1c-4b09-a138-f48e4d7b60ea tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Lock "c84c8b9a-9164-4dd7-b094-dd09c15c6f21-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 961.770280] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7f7d0aef-0d1c-4b09-a138-f48e4d7b60ea 
tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Lock "c84c8b9a-9164-4dd7-b094-dd09c15c6f21-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 961.776322] env[63345]: DEBUG oslo_vmware.api [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017550, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.777028] env[63345]: INFO nova.compute.manager [None req-7f7d0aef-0d1c-4b09-a138-f48e4d7b60ea tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: c84c8b9a-9164-4dd7-b094-dd09c15c6f21] Terminating instance [ 961.787557] env[63345]: INFO nova.compute.manager [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Detaching volume f3eb7f29-d3fd-4c7d-ab93-1582eb175324 [ 961.826296] env[63345]: INFO nova.virt.block_device [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Attempting to driver detach volume f3eb7f29-d3fd-4c7d-ab93-1582eb175324 from mountpoint /dev/sdb [ 961.826577] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Volume detach. 
Driver type: vmdk {{(pid=63345) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 961.826776] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-226116', 'volume_id': 'f3eb7f29-d3fd-4c7d-ab93-1582eb175324', 'name': 'volume-f3eb7f29-d3fd-4c7d-ab93-1582eb175324', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '4868a0a0-ca35-44b0-a90c-124aa366af76', 'attached_at': '', 'detached_at': '', 'volume_id': 'f3eb7f29-d3fd-4c7d-ab93-1582eb175324', 'serial': 'f3eb7f29-d3fd-4c7d-ab93-1582eb175324'} {{(pid=63345) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 961.832710] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85227d82-3912-43de-b431-f851013ecc9b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.864436] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a983c288-dd30-4d4c-93d2-552600ab0cbc {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.874435] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-411f1719-b2f4-4828-995a-3345b1c3bfe2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.900610] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-462739eb-1a0c-4a8c-ac51-4d08f629e8e8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.915696] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] The volume has not been displaced from its original location: [datastore1] volume-f3eb7f29-d3fd-4c7d-ab93-1582eb175324/volume-f3eb7f29-d3fd-4c7d-ab93-1582eb175324.vmdk. No consolidation needed. 
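The "volume has not been displaced from its original location ... No consolidation needed" decision boils down to comparing where Cinder expects the volume vmdk to live against where the disk backing currently points; only if they differ must the file be copied back before detach. A hedged sketch of that check (names are illustrative, not the driver's API):

def needs_consolidation(original_vmdk_path, current_vmdk_path):
    """True if the datastore file backing the volume has moved (for example via
    storage migration) and must be consolidated before a clean detach."""
    return original_vmdk_path != current_vmdk_path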
{{(pid=63345) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 961.921054] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Reconfiguring VM instance instance-00000056 to detach disk 2001 {{(pid=63345) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 961.921678] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-99cb1d5e-edd1-47cf-b5e1-708221cf6727 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.939469] env[63345]: DEBUG oslo_vmware.api [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 961.939469] env[63345]: value = "task-1017551" [ 961.939469] env[63345]: _type = "Task" [ 961.939469] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.947512] env[63345]: DEBUG oslo_vmware.api [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017551, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.988176] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 962.124680] env[63345]: DEBUG oslo_vmware.api [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Task: {'id': task-1017548, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.842273} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.126944] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore1] b3f20003-f75d-4d9f-bb4a-02d2930054a8/b3f20003-f75d-4d9f-bb4a-02d2930054a8.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 962.127187] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 962.128188] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d75616d6-816b-4fc0-8af8-b50ee6f7e815 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.133495] env[63345]: DEBUG oslo_vmware.api [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Waiting for the task: (returnval){ [ 962.133495] env[63345]: value = "task-1017552" [ 962.133495] env[63345]: _type = "Task" [ 962.133495] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.142067] env[63345]: DEBUG oslo_vmware.api [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Task: {'id': task-1017552, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.273714] env[63345]: DEBUG oslo_vmware.api [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017550, 'name': PowerOnVM_Task, 'duration_secs': 0.615549} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.274016] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 962.281659] env[63345]: DEBUG nova.compute.manager [None req-7f7d0aef-0d1c-4b09-a138-f48e4d7b60ea tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: c84c8b9a-9164-4dd7-b094-dd09c15c6f21] Start destroying the instance on the hypervisor. 
{{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 962.282395] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-7f7d0aef-0d1c-4b09-a138-f48e4d7b60ea tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: c84c8b9a-9164-4dd7-b094-dd09c15c6f21] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 962.282720] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba4b260d-870b-4b30-bd7a-bc8a0097ec74 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.289774] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f7d0aef-0d1c-4b09-a138-f48e4d7b60ea tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: c84c8b9a-9164-4dd7-b094-dd09c15c6f21] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 962.290846] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cad302c3-9afb-4cff-bb1f-53ed697856f8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.292805] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbade7e9-9fe0-4ca5-9cb8-ce8e5417a01d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.302618] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c5ccff0-5d14-4b52-965f-6c3d64824403 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.306388] env[63345]: DEBUG oslo_vmware.api [None req-7f7d0aef-0d1c-4b09-a138-f48e4d7b60ea tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Waiting for the task: (returnval){ [ 962.306388] env[63345]: value = "task-1017553" [ 962.306388] env[63345]: _type = "Task" [ 962.306388] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.334180] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a95d8991-fd2c-4494-9132-083fe675d763 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.343767] env[63345]: DEBUG oslo_vmware.api [None req-7f7d0aef-0d1c-4b09-a138-f48e4d7b60ea tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017553, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.348665] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbd81577-f574-4f5e-95c6-89abaeb192c7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.362934] env[63345]: DEBUG nova.compute.provider_tree [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 962.406558] env[63345]: DEBUG nova.compute.manager [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 962.407502] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fafcdbd7-6102-48ed-82a8-d545cbdff6ce {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.446897] env[63345]: DEBUG nova.network.neutron [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 732ac30c-15c1-4c57-bb70-ea3ed51f646b] Successfully updated port: 4b958d52-f058-41bc-a29e-1c8a2749ac18 {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 962.457799] env[63345]: DEBUG oslo_vmware.api [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017551, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.548954] env[63345]: DEBUG nova.compute.manager [req-6eb2d354-ba1a-45b1-81b9-a3ab548e5c48 req-53b0701a-2507-4185-b63a-03d8ea7ea13c service nova] [instance: 732ac30c-15c1-4c57-bb70-ea3ed51f646b] Received event network-vif-plugged-4b958d52-f058-41bc-a29e-1c8a2749ac18 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 962.549049] env[63345]: DEBUG oslo_concurrency.lockutils [req-6eb2d354-ba1a-45b1-81b9-a3ab548e5c48 req-53b0701a-2507-4185-b63a-03d8ea7ea13c service nova] Acquiring lock "732ac30c-15c1-4c57-bb70-ea3ed51f646b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 962.549201] env[63345]: DEBUG oslo_concurrency.lockutils [req-6eb2d354-ba1a-45b1-81b9-a3ab548e5c48 req-53b0701a-2507-4185-b63a-03d8ea7ea13c service nova] Lock "732ac30c-15c1-4c57-bb70-ea3ed51f646b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 962.549385] env[63345]: DEBUG oslo_concurrency.lockutils [req-6eb2d354-ba1a-45b1-81b9-a3ab548e5c48 req-53b0701a-2507-4185-b63a-03d8ea7ea13c service nova] Lock "732ac30c-15c1-4c57-bb70-ea3ed51f646b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 962.549701] env[63345]: DEBUG nova.compute.manager [req-6eb2d354-ba1a-45b1-81b9-a3ab548e5c48 req-53b0701a-2507-4185-b63a-03d8ea7ea13c service nova] [instance: 732ac30c-15c1-4c57-bb70-ea3ed51f646b] No waiting events found dispatching network-vif-plugged-4b958d52-f058-41bc-a29e-1c8a2749ac18 {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 962.549817] env[63345]: WARNING nova.compute.manager [req-6eb2d354-ba1a-45b1-81b9-a3ab548e5c48 req-53b0701a-2507-4185-b63a-03d8ea7ea13c service nova] [instance: 732ac30c-15c1-4c57-bb70-ea3ed51f646b] Received unexpected event network-vif-plugged-4b958d52-f058-41bc-a29e-1c8a2749ac18 for instance with vm_state building and task_state spawning. [ 962.550228] env[63345]: DEBUG nova.compute.manager [req-6eb2d354-ba1a-45b1-81b9-a3ab548e5c48 req-53b0701a-2507-4185-b63a-03d8ea7ea13c service nova] [instance: 732ac30c-15c1-4c57-bb70-ea3ed51f646b] Received event network-changed-4b958d52-f058-41bc-a29e-1c8a2749ac18 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 962.550392] env[63345]: DEBUG nova.compute.manager [req-6eb2d354-ba1a-45b1-81b9-a3ab548e5c48 req-53b0701a-2507-4185-b63a-03d8ea7ea13c service nova] [instance: 732ac30c-15c1-4c57-bb70-ea3ed51f646b] Refreshing instance network info cache due to event network-changed-4b958d52-f058-41bc-a29e-1c8a2749ac18. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 962.550622] env[63345]: DEBUG oslo_concurrency.lockutils [req-6eb2d354-ba1a-45b1-81b9-a3ab548e5c48 req-53b0701a-2507-4185-b63a-03d8ea7ea13c service nova] Acquiring lock "refresh_cache-732ac30c-15c1-4c57-bb70-ea3ed51f646b" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 962.550775] env[63345]: DEBUG oslo_concurrency.lockutils [req-6eb2d354-ba1a-45b1-81b9-a3ab548e5c48 req-53b0701a-2507-4185-b63a-03d8ea7ea13c service nova] Acquired lock "refresh_cache-732ac30c-15c1-4c57-bb70-ea3ed51f646b" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 962.550947] env[63345]: DEBUG nova.network.neutron [req-6eb2d354-ba1a-45b1-81b9-a3ab548e5c48 req-53b0701a-2507-4185-b63a-03d8ea7ea13c service nova] [instance: 732ac30c-15c1-4c57-bb70-ea3ed51f646b] Refreshing network info cache for port 4b958d52-f058-41bc-a29e-1c8a2749ac18 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 962.645584] env[63345]: DEBUG oslo_vmware.api [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Task: {'id': task-1017552, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066476} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.646238] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 962.646917] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34b359f5-b5fc-4e74-8c0b-5f9b13c4ec34 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.668739] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] Reconfiguring VM instance instance-0000005c to attach disk [datastore1] b3f20003-f75d-4d9f-bb4a-02d2930054a8/b3f20003-f75d-4d9f-bb4a-02d2930054a8.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 962.669038] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-468b66cb-5923-45fc-9b99-ef81fd9e6868 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.690040] env[63345]: DEBUG oslo_vmware.api [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Waiting for the task: (returnval){ [ 962.690040] env[63345]: value = "task-1017554" [ 962.690040] env[63345]: _type = "Task" [ 962.690040] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.698820] env[63345]: DEBUG oslo_vmware.api [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Task: {'id': task-1017554, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.816954] env[63345]: DEBUG oslo_vmware.api [None req-7f7d0aef-0d1c-4b09-a138-f48e4d7b60ea tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017553, 'name': PowerOffVM_Task} progress is 100%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.866656] env[63345]: DEBUG nova.scheduler.client.report [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 962.930753] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8c4bcace-af10-4e64-9b54-212d2089f325 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Lock "9aa651b8-317d-4153-8c33-9df0a5d16115" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 52.668s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 962.952362] env[63345]: DEBUG oslo_concurrency.lockutils [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquiring lock "refresh_cache-732ac30c-15c1-4c57-bb70-ea3ed51f646b" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 962.952933] env[63345]: DEBUG oslo_vmware.api [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017551, 'name': ReconfigVM_Task, 'duration_secs': 0.572626} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.953240] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Reconfigured VM instance instance-00000056 to detach disk 2001 {{(pid=63345) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 962.957975] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-67536efe-da86-4ce0-825b-bb5dec49d5e4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.975648] env[63345]: DEBUG oslo_vmware.api [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 962.975648] env[63345]: value = "task-1017555" [ 962.975648] env[63345]: _type = "Task" [ 962.975648] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.985912] env[63345]: DEBUG oslo_vmware.api [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017555, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.119333] env[63345]: DEBUG nova.network.neutron [req-6eb2d354-ba1a-45b1-81b9-a3ab548e5c48 req-53b0701a-2507-4185-b63a-03d8ea7ea13c service nova] [instance: 732ac30c-15c1-4c57-bb70-ea3ed51f646b] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 963.201994] env[63345]: DEBUG oslo_vmware.api [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Task: {'id': task-1017554, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.309233] env[63345]: DEBUG nova.network.neutron [req-6eb2d354-ba1a-45b1-81b9-a3ab548e5c48 req-53b0701a-2507-4185-b63a-03d8ea7ea13c service nova] [instance: 732ac30c-15c1-4c57-bb70-ea3ed51f646b] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 963.322471] env[63345]: DEBUG oslo_vmware.api [None req-7f7d0aef-0d1c-4b09-a138-f48e4d7b60ea tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017553, 'name': PowerOffVM_Task, 'duration_secs': 0.516749} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.322834] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f7d0aef-0d1c-4b09-a138-f48e4d7b60ea tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: c84c8b9a-9164-4dd7-b094-dd09c15c6f21] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 963.323089] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-7f7d0aef-0d1c-4b09-a138-f48e4d7b60ea tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: c84c8b9a-9164-4dd7-b094-dd09c15c6f21] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 963.323405] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c038fbf1-d65d-40b9-9457-0fa455078d9a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.372305] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.305s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 963.373022] env[63345]: DEBUG nova.compute.manager [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Start building networks asynchronously for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 963.376679] env[63345]: DEBUG oslo_concurrency.lockutils [None req-95865fd4-1899-48de-879c-1b4d5498289a tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 963.377011] env[63345]: DEBUG nova.objects.instance [None req-95865fd4-1899-48de-879c-1b4d5498289a tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Lazy-loading 'resources' on Instance uuid 3a85df04-3997-48a3-8992-f24fe997b3cc {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 963.402677] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-7f7d0aef-0d1c-4b09-a138-f48e4d7b60ea tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: c84c8b9a-9164-4dd7-b094-dd09c15c6f21] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 963.402901] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-7f7d0aef-0d1c-4b09-a138-f48e4d7b60ea tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: c84c8b9a-9164-4dd7-b094-dd09c15c6f21] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 963.403647] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f7d0aef-0d1c-4b09-a138-f48e4d7b60ea tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Deleting the datastore file 
[datastore2] c84c8b9a-9164-4dd7-b094-dd09c15c6f21 {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 963.403647] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f7b5e0a4-8efe-4a08-a9fe-999122448e0c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.412125] env[63345]: DEBUG oslo_vmware.api [None req-7f7d0aef-0d1c-4b09-a138-f48e4d7b60ea tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Waiting for the task: (returnval){ [ 963.412125] env[63345]: value = "task-1017557" [ 963.412125] env[63345]: _type = "Task" [ 963.412125] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.422370] env[63345]: DEBUG oslo_vmware.api [None req-7f7d0aef-0d1c-4b09-a138-f48e4d7b60ea tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017557, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.488653] env[63345]: DEBUG oslo_vmware.api [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017555, 'name': ReconfigVM_Task, 'duration_secs': 0.155991} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.489547] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-226116', 'volume_id': 'f3eb7f29-d3fd-4c7d-ab93-1582eb175324', 'name': 'volume-f3eb7f29-d3fd-4c7d-ab93-1582eb175324', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '4868a0a0-ca35-44b0-a90c-124aa366af76', 'attached_at': '', 'detached_at': '', 'volume_id': 'f3eb7f29-d3fd-4c7d-ab93-1582eb175324', 'serial': 'f3eb7f29-d3fd-4c7d-ab93-1582eb175324'} {{(pid=63345) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 963.701113] env[63345]: DEBUG oslo_vmware.api [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Task: {'id': task-1017554, 'name': ReconfigVM_Task, 'duration_secs': 0.518685} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.701256] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] Reconfigured VM instance instance-0000005c to attach disk [datastore1] b3f20003-f75d-4d9f-bb4a-02d2930054a8/b3f20003-f75d-4d9f-bb4a-02d2930054a8.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 963.701901] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-49b07571-ae0d-4cec-a693-2ceecdfbba39 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.710990] env[63345]: DEBUG oslo_vmware.api [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Waiting for the task: (returnval){ [ 963.710990] env[63345]: value = "task-1017558" [ 963.710990] env[63345]: _type = "Task" [ 963.710990] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.717083] env[63345]: DEBUG oslo_vmware.api [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Task: {'id': task-1017558, 'name': Rename_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.816132] env[63345]: DEBUG oslo_concurrency.lockutils [req-6eb2d354-ba1a-45b1-81b9-a3ab548e5c48 req-53b0701a-2507-4185-b63a-03d8ea7ea13c service nova] Releasing lock "refresh_cache-732ac30c-15c1-4c57-bb70-ea3ed51f646b" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 963.816555] env[63345]: DEBUG oslo_concurrency.lockutils [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquired lock "refresh_cache-732ac30c-15c1-4c57-bb70-ea3ed51f646b" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 963.816722] env[63345]: DEBUG nova.network.neutron [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 732ac30c-15c1-4c57-bb70-ea3ed51f646b] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 963.880792] env[63345]: DEBUG nova.compute.utils [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 963.885652] env[63345]: DEBUG nova.compute.manager [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Allocating IP information in the background. 
{{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 963.885867] env[63345]: DEBUG nova.network.neutron [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 963.926329] env[63345]: DEBUG oslo_vmware.api [None req-7f7d0aef-0d1c-4b09-a138-f48e4d7b60ea tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017557, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.201982} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.926646] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f7d0aef-0d1c-4b09-a138-f48e4d7b60ea tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 963.926859] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-7f7d0aef-0d1c-4b09-a138-f48e4d7b60ea tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: c84c8b9a-9164-4dd7-b094-dd09c15c6f21] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 963.927068] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-7f7d0aef-0d1c-4b09-a138-f48e4d7b60ea tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: c84c8b9a-9164-4dd7-b094-dd09c15c6f21] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 963.927267] env[63345]: INFO nova.compute.manager [None req-7f7d0aef-0d1c-4b09-a138-f48e4d7b60ea tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: c84c8b9a-9164-4dd7-b094-dd09c15c6f21] Took 1.65 seconds to destroy the instance on the hypervisor. [ 963.927570] env[63345]: DEBUG oslo.service.loopingcall [None req-7f7d0aef-0d1c-4b09-a138-f48e4d7b60ea tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 963.927834] env[63345]: DEBUG nova.compute.manager [-] [instance: c84c8b9a-9164-4dd7-b094-dd09c15c6f21] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 963.927834] env[63345]: DEBUG nova.network.neutron [-] [instance: c84c8b9a-9164-4dd7-b094-dd09c15c6f21] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 964.104138] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd5d5c59-28a0-4d21-ba60-73f7c158f6d8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.107014] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7a025aa8-413d-4515-be61-b039c163c688 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquiring lock "0fe61754-458c-4c5c-bb2d-2677302e5fb9" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 964.107266] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7a025aa8-413d-4515-be61-b039c163c688 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Lock "0fe61754-458c-4c5c-bb2d-2677302e5fb9" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 964.107458] env[63345]: DEBUG nova.compute.manager [None req-7a025aa8-413d-4515-be61-b039c163c688 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Going to confirm migration 3 {{(pid=63345) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5112}} [ 964.116184] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce316c17-944e-43cb-874f-4392da64a7b0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.148432] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddf51e08-5983-49b2-b69c-1368ac3051ab {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.152286] env[63345]: DEBUG nova.policy [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f30f1732c89e4f1d87bc564854295c21', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4560e378b6aa47a3bbb5a2f7c5b76f5f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 964.160253] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5cf565e-c718-4e14-9d76-1ce8448f760b {{(pid=63345) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.177733] env[63345]: DEBUG nova.compute.provider_tree [None req-95865fd4-1899-48de-879c-1b4d5498289a tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 964.220351] env[63345]: DEBUG oslo_vmware.api [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Task: {'id': task-1017558, 'name': Rename_Task, 'duration_secs': 0.191149} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.220893] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 964.221278] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-18602f41-761f-49ef-a08c-c1f442bdebc4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.229427] env[63345]: DEBUG oslo_vmware.api [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Waiting for the task: (returnval){ [ 964.229427] env[63345]: value = "task-1017559" [ 964.229427] env[63345]: _type = "Task" [ 964.229427] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.240397] env[63345]: DEBUG oslo_vmware.api [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Task: {'id': task-1017559, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.242778] env[63345]: DEBUG nova.compute.manager [req-d4861d97-c1ed-43a6-b55c-270e56642aa4 req-602605e6-8823-47d5-948b-3ad6f4a470c6 service nova] [instance: c84c8b9a-9164-4dd7-b094-dd09c15c6f21] Received event network-vif-deleted-2ddc051b-7b20-4e01-8f07-b53ddf9efada {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 964.243008] env[63345]: INFO nova.compute.manager [req-d4861d97-c1ed-43a6-b55c-270e56642aa4 req-602605e6-8823-47d5-948b-3ad6f4a470c6 service nova] [instance: c84c8b9a-9164-4dd7-b094-dd09c15c6f21] Neutron deleted interface 2ddc051b-7b20-4e01-8f07-b53ddf9efada; detaching it from the instance and deleting it from the info cache [ 964.243321] env[63345]: DEBUG nova.network.neutron [req-d4861d97-c1ed-43a6-b55c-270e56642aa4 req-602605e6-8823-47d5-948b-3ad6f4a470c6 service nova] [instance: c84c8b9a-9164-4dd7-b094-dd09c15c6f21] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 964.350935] env[63345]: DEBUG nova.network.neutron [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 732ac30c-15c1-4c57-bb70-ea3ed51f646b] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 964.386017] env[63345]: DEBUG nova.compute.manager [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Start building block device mappings for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 964.501401] env[63345]: DEBUG nova.network.neutron [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 732ac30c-15c1-4c57-bb70-ea3ed51f646b] Updating instance_info_cache with network_info: [{"id": "4b958d52-f058-41bc-a29e-1c8a2749ac18", "address": "fa:16:3e:c1:c5:58", "network": {"id": "d7581fd9-99cb-4847-b9da-a659a40e1d52", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1100696493-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c535ae9067ab4e8a87e95c68af4624fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f77ff7a1-209c-4f3f-b2a0-fd817741e739", "external-id": "nsx-vlan-transportzone-935", "segmentation_id": 935, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b958d52-f0", "ovs_interfaceid": "4b958d52-f058-41bc-a29e-1c8a2749ac18", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 964.555140] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 964.555465] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1b395414-22c6-4930-a8b3-07657e1fa2a5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.563589] env[63345]: DEBUG oslo_vmware.api [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 964.563589] env[63345]: value = "task-1017560" [ 964.563589] env[63345]: _type = "Task" [ 964.563589] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.569059] env[63345]: DEBUG nova.network.neutron [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Successfully created port: 267ad158-547a-4d3a-a838-3d964626d731 {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 964.577364] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] VM already powered off {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 964.577599] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Volume detach. Driver type: vmdk {{(pid=63345) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 964.577802] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-226116', 'volume_id': 'f3eb7f29-d3fd-4c7d-ab93-1582eb175324', 'name': 'volume-f3eb7f29-d3fd-4c7d-ab93-1582eb175324', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '4868a0a0-ca35-44b0-a90c-124aa366af76', 'attached_at': '', 'detached_at': '', 'volume_id': 'f3eb7f29-d3fd-4c7d-ab93-1582eb175324', 'serial': 'f3eb7f29-d3fd-4c7d-ab93-1582eb175324'} {{(pid=63345) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 964.578614] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b310787a-d9c3-4834-8ada-782a1df9b227 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.599537] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0d85651-5008-4421-9809-efc6339af3fc {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.608157] env[63345]: WARNING nova.virt.vmwareapi.driver [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] The volume None does not exist!: nova.exception.DiskNotFound: Unable to find volume [ 964.608157] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 964.608436] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c4993b3-4cd4-4638-a2a4-807827998dfe {{(pid=63345) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.618188] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 964.618451] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2919ea07-7198-4fa4-a528-80f24835496e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.687063] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7a025aa8-413d-4515-be61-b039c163c688 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquiring lock "refresh_cache-0fe61754-458c-4c5c-bb2d-2677302e5fb9" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 964.687063] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7a025aa8-413d-4515-be61-b039c163c688 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquired lock "refresh_cache-0fe61754-458c-4c5c-bb2d-2677302e5fb9" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 964.687063] env[63345]: DEBUG nova.network.neutron [None req-7a025aa8-413d-4515-be61-b039c163c688 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 964.687063] env[63345]: DEBUG nova.objects.instance [None req-7a025aa8-413d-4515-be61-b039c163c688 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Lazy-loading 'info_cache' on Instance uuid 0fe61754-458c-4c5c-bb2d-2677302e5fb9 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 964.706025] env[63345]: ERROR nova.scheduler.client.report [None req-95865fd4-1899-48de-879c-1b4d5498289a tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] [req-76b83414-e7a1-4491-b8de-43ec66de5b1e] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID fc35ddde-c15e-4ab8-bf77-a06ae0805b57. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-76b83414-e7a1-4491-b8de-43ec66de5b1e"}]} [ 964.718377] env[63345]: DEBUG nova.network.neutron [-] [instance: c84c8b9a-9164-4dd7-b094-dd09c15c6f21] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 964.718687] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 964.718735] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 964.719468] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Deleting the datastore file [datastore2] 4868a0a0-ca35-44b0-a90c-124aa366af76 {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 964.719681] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4a8f8882-afc6-4456-abda-7a43f637a410 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.727912] env[63345]: DEBUG oslo_vmware.api [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 964.727912] env[63345]: value = "task-1017562" [ 964.727912] env[63345]: _type = "Task" [ 964.727912] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.737593] env[63345]: DEBUG nova.scheduler.client.report [None req-95865fd4-1899-48de-879c-1b4d5498289a tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Refreshing inventories for resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 964.743672] env[63345]: DEBUG oslo_vmware.api [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017562, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.745600] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2d113e20-5ee6-4a13-97a3-af3fad5293ae {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.752038] env[63345]: DEBUG oslo_vmware.api [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Task: {'id': task-1017559, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.758917] env[63345]: DEBUG nova.scheduler.client.report [None req-95865fd4-1899-48de-879c-1b4d5498289a tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Updating ProviderTree inventory for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 964.759232] env[63345]: DEBUG nova.compute.provider_tree [None req-95865fd4-1899-48de-879c-1b4d5498289a tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 964.765576] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e670296c-5cb8-457b-be3a-844717714d5a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.800733] env[63345]: DEBUG nova.compute.manager [req-d4861d97-c1ed-43a6-b55c-270e56642aa4 req-602605e6-8823-47d5-948b-3ad6f4a470c6 service nova] [instance: c84c8b9a-9164-4dd7-b094-dd09c15c6f21] Detach interface failed, port_id=2ddc051b-7b20-4e01-8f07-b53ddf9efada, reason: Instance c84c8b9a-9164-4dd7-b094-dd09c15c6f21 could not be found. 
{{(pid=63345) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 964.802162] env[63345]: DEBUG nova.scheduler.client.report [None req-95865fd4-1899-48de-879c-1b4d5498289a tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Refreshing aggregate associations for resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57, aggregates: None {{(pid=63345) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 964.822700] env[63345]: DEBUG nova.scheduler.client.report [None req-95865fd4-1899-48de-879c-1b4d5498289a tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Refreshing trait associations for resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=63345) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 965.004430] env[63345]: DEBUG oslo_concurrency.lockutils [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Releasing lock "refresh_cache-732ac30c-15c1-4c57-bb70-ea3ed51f646b" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 965.004772] env[63345]: DEBUG nova.compute.manager [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 732ac30c-15c1-4c57-bb70-ea3ed51f646b] Instance network_info: |[{"id": "4b958d52-f058-41bc-a29e-1c8a2749ac18", "address": "fa:16:3e:c1:c5:58", "network": {"id": "d7581fd9-99cb-4847-b9da-a659a40e1d52", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1100696493-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c535ae9067ab4e8a87e95c68af4624fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f77ff7a1-209c-4f3f-b2a0-fd817741e739", "external-id": "nsx-vlan-transportzone-935", "segmentation_id": 935, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b958d52-f0", "ovs_interfaceid": "4b958d52-f058-41bc-a29e-1c8a2749ac18", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 965.005278] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 732ac30c-15c1-4c57-bb70-ea3ed51f646b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c1:c5:58', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f77ff7a1-209c-4f3f-b2a0-fd817741e739', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4b958d52-f058-41bc-a29e-1c8a2749ac18', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 965.013888] env[63345]: DEBUG oslo.service.loopingcall [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 965.017061] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 732ac30c-15c1-4c57-bb70-ea3ed51f646b] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 965.017719] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-56823cba-c146-4134-821a-73d34b4ccd84 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.041721] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 965.041721] env[63345]: value = "task-1017563" [ 965.041721] env[63345]: _type = "Task" [ 965.041721] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.052798] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017563, 'name': CreateVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.070338] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50b3d9f7-8890-4757-9b78-0e0b3ee5cd90 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.078547] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9497971-b211-4bb8-b996-d2bf8061817c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.114539] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c64818a2-887f-4a2d-a9dc-31e2107cb3e0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.122492] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-561f9c75-4182-4c7e-a959-3bfae441966b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.136741] env[63345]: DEBUG nova.compute.provider_tree [None req-95865fd4-1899-48de-879c-1b4d5498289a tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 965.220045] env[63345]: INFO nova.compute.manager [-] [instance: c84c8b9a-9164-4dd7-b094-dd09c15c6f21] Took 1.29 seconds to deallocate network for instance. 
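A minimal illustrative sketch (not part of the log): the invoke-and-wait pattern behind the many "Invoking <Type>.<Something>_Task", "Waiting for the task: (returnval){...}" and "_poll_task ... progress is N%" records above. It assumes an already established oslo_vmware.api.VMwareAPISession named `session` and a VM managed-object reference `vm_ref`; the helper name power_off_and_wait is hypothetical, while invoke_api, wait_for_task and the `vim` property are the public oslo.vmware session interface the log's line references (oslo_vmware/api.py:397/434, service.py:371) point at.

def power_off_and_wait(session, vm_ref):
    # Start the asynchronous vSphere task; this is what the log records as
    # "Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-...".
    task_ref = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    # Block until vCenter reports the task as finished. oslo.vmware polls the
    # task periodically, producing the "_poll_task ... progress is N%" lines,
    # and raises an oslo_vmware exception if the task ends in error.
    return session.wait_for_task(task_ref)

The same two-step shape (submit a *_Task method, then wait_for_task on the returned task moref) underlies the ReconfigVM_Task, CopyVirtualDisk_Task, CreateVM_Task and DeleteDatastoreFile_Task sequences interleaved throughout this section; only the method name and arguments differ.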
[ 965.240315] env[63345]: DEBUG oslo_vmware.api [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017562, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.245837} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.243436] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 965.243703] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 965.243848] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 965.246418] env[63345]: DEBUG oslo_vmware.api [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Task: {'id': task-1017559, 'name': PowerOnVM_Task, 'duration_secs': 0.788451} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.246955] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 965.247176] env[63345]: INFO nova.compute.manager [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] Took 10.13 seconds to spawn the instance on the hypervisor. [ 965.247427] env[63345]: DEBUG nova.compute.manager [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 965.248242] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-355b2e37-3b2f-4037-98b0-6a24e104ae78 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.399650] env[63345]: DEBUG nova.compute.manager [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Start spawning the instance on the hypervisor. 
{{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 965.427397] env[63345]: DEBUG nova.virt.hardware [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 965.427721] env[63345]: DEBUG nova.virt.hardware [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 965.427900] env[63345]: DEBUG nova.virt.hardware [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 965.428126] env[63345]: DEBUG nova.virt.hardware [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 965.428288] env[63345]: DEBUG nova.virt.hardware [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 965.428583] env[63345]: DEBUG nova.virt.hardware [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 965.428824] env[63345]: DEBUG nova.virt.hardware [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 965.429013] env[63345]: DEBUG nova.virt.hardware [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 965.429243] env[63345]: DEBUG nova.virt.hardware [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 
tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 965.429437] env[63345]: DEBUG nova.virt.hardware [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 965.429638] env[63345]: DEBUG nova.virt.hardware [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 965.430622] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6c553ff-b393-42b8-877d-8a2af3d09e42 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.439323] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2971559e-abd1-4abe-b12c-5779d40d5794 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.551912] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017563, 'name': CreateVM_Task, 'duration_secs': 0.450414} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.552119] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 732ac30c-15c1-4c57-bb70-ea3ed51f646b] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 965.552850] env[63345]: DEBUG oslo_concurrency.lockutils [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 965.553982] env[63345]: DEBUG oslo_concurrency.lockutils [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 965.553982] env[63345]: DEBUG oslo_concurrency.lockutils [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 965.553982] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0c78185a-2211-4a4a-b19f-f2219f3ecd56 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.560031] env[63345]: DEBUG oslo_vmware.api [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 
tempest-DeleteServersTestJSON-208475450-project-member] Waiting for the task: (returnval){ [ 965.560031] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]523b7100-ecf8-b2a3-b84e-b2c9d2f7a35d" [ 965.560031] env[63345]: _type = "Task" [ 965.560031] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.568594] env[63345]: DEBUG oslo_vmware.api [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]523b7100-ecf8-b2a3-b84e-b2c9d2f7a35d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.670162] env[63345]: DEBUG nova.scheduler.client.report [None req-95865fd4-1899-48de-879c-1b4d5498289a tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Updated inventory for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with generation 124 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 965.670162] env[63345]: DEBUG nova.compute.provider_tree [None req-95865fd4-1899-48de-879c-1b4d5498289a tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Updating resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 generation from 124 to 125 during operation: update_inventory {{(pid=63345) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 965.670162] env[63345]: DEBUG nova.compute.provider_tree [None req-95865fd4-1899-48de-879c-1b4d5498289a tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 965.726252] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7f7d0aef-0d1c-4b09-a138-f48e4d7b60ea tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 965.749285] env[63345]: INFO nova.virt.block_device [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Booting with volume f3eb7f29-d3fd-4c7d-ab93-1582eb175324 at /dev/sdb [ 965.767722] env[63345]: INFO nova.compute.manager [None 
req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] Took 20.27 seconds to build instance. [ 965.788561] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-30642668-bdb5-41da-97c4-4e6e052ae0ea {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.801566] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5494499-a8fc-4123-baf3-925063a5fb17 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.840307] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7dc038a7-b9e8-407a-8d95-f6e6c463f6c3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.850029] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-163ac0e2-e01b-40c1-9d40-887295ac4eb3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.885479] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71eedb17-7703-4fb5-b7d6-288aa9d7ea9f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.892020] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b43634f2-2495-46ec-9c99-4b05a42ade3c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.906194] env[63345]: DEBUG nova.virt.block_device [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Updating existing volume attachment record: 1fc9cca8-87f3-4f4a-96cd-60ec4383aef1 {{(pid=63345) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 965.933859] env[63345]: DEBUG nova.network.neutron [None req-7a025aa8-413d-4515-be61-b039c163c688 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Updating instance_info_cache with network_info: [{"id": "0cc6f455-5ad2-4802-a0ff-42268fe50023", "address": "fa:16:3e:61:01:ef", "network": {"id": "80bb8388-e130-46af-a4fc-1daea51d1bf5", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1343573007-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "964cee117b3c4601b3afe82a8bb9c23e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ddfb706a-add1-4e16-9ac4-d20b16a1df6d", "external-id": "nsx-vlan-transportzone-820", "segmentation_id": 820, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0cc6f455-5a", "ovs_interfaceid": 
"0cc6f455-5ad2-4802-a0ff-42268fe50023", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 966.076602] env[63345]: DEBUG oslo_vmware.api [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]523b7100-ecf8-b2a3-b84e-b2c9d2f7a35d, 'name': SearchDatastore_Task, 'duration_secs': 0.026922} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.077319] env[63345]: DEBUG oslo_concurrency.lockutils [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 966.077568] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 732ac30c-15c1-4c57-bb70-ea3ed51f646b] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 966.077809] env[63345]: DEBUG oslo_concurrency.lockutils [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 966.077960] env[63345]: DEBUG oslo_concurrency.lockutils [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 966.078163] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 966.078449] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b3a67ad4-06fb-432b-a361-e7def40bfdd3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.103976] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 966.104471] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 
tempest-DeleteServersTestJSON-208475450-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 966.104959] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-93150cea-2011-4d1c-aa78-74c10b404461 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.110900] env[63345]: DEBUG oslo_vmware.api [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for the task: (returnval){ [ 966.110900] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]522e7118-c6cb-9827-aa04-9a3060e744cb" [ 966.110900] env[63345]: _type = "Task" [ 966.110900] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.119391] env[63345]: DEBUG oslo_vmware.api [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]522e7118-c6cb-9827-aa04-9a3060e744cb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.175721] env[63345]: DEBUG oslo_concurrency.lockutils [None req-95865fd4-1899-48de-879c-1b4d5498289a tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.799s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 966.178232] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.190s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 966.179850] env[63345]: INFO nova.compute.claims [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 966.194200] env[63345]: DEBUG nova.compute.manager [req-33c8d9aa-cc78-4893-b07e-7a9657dc88d7 req-606f815d-c91b-40f7-a25e-d77a15bf2927 service nova] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Received event network-vif-plugged-267ad158-547a-4d3a-a838-3d964626d731 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 966.194936] env[63345]: DEBUG oslo_concurrency.lockutils [req-33c8d9aa-cc78-4893-b07e-7a9657dc88d7 req-606f815d-c91b-40f7-a25e-d77a15bf2927 service nova] Acquiring lock "22a11cf9-8f85-4371-98eb-25b267c9aff7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 966.194936] env[63345]: DEBUG oslo_concurrency.lockutils [req-33c8d9aa-cc78-4893-b07e-7a9657dc88d7 req-606f815d-c91b-40f7-a25e-d77a15bf2927 service nova] Lock 
"22a11cf9-8f85-4371-98eb-25b267c9aff7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 966.194936] env[63345]: DEBUG oslo_concurrency.lockutils [req-33c8d9aa-cc78-4893-b07e-7a9657dc88d7 req-606f815d-c91b-40f7-a25e-d77a15bf2927 service nova] Lock "22a11cf9-8f85-4371-98eb-25b267c9aff7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 966.195165] env[63345]: DEBUG nova.compute.manager [req-33c8d9aa-cc78-4893-b07e-7a9657dc88d7 req-606f815d-c91b-40f7-a25e-d77a15bf2927 service nova] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] No waiting events found dispatching network-vif-plugged-267ad158-547a-4d3a-a838-3d964626d731 {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 966.195398] env[63345]: WARNING nova.compute.manager [req-33c8d9aa-cc78-4893-b07e-7a9657dc88d7 req-606f815d-c91b-40f7-a25e-d77a15bf2927 service nova] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Received unexpected event network-vif-plugged-267ad158-547a-4d3a-a838-3d964626d731 for instance with vm_state building and task_state spawning. [ 966.208250] env[63345]: INFO nova.scheduler.client.report [None req-95865fd4-1899-48de-879c-1b4d5498289a tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Deleted allocations for instance 3a85df04-3997-48a3-8992-f24fe997b3cc [ 966.269980] env[63345]: DEBUG oslo_concurrency.lockutils [None req-bc8bacce-823e-4dbf-b8c4-cf24c91c47e6 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Acquiring lock "070a834d-6478-4705-8df0-2a27c8780507" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 966.269980] env[63345]: DEBUG oslo_concurrency.lockutils [None req-bc8bacce-823e-4dbf-b8c4-cf24c91c47e6 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Lock "070a834d-6478-4705-8df0-2a27c8780507" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 966.269980] env[63345]: DEBUG oslo_concurrency.lockutils [None req-bc8bacce-823e-4dbf-b8c4-cf24c91c47e6 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Acquiring lock "070a834d-6478-4705-8df0-2a27c8780507-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 966.270345] env[63345]: DEBUG oslo_concurrency.lockutils [None req-bc8bacce-823e-4dbf-b8c4-cf24c91c47e6 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Lock "070a834d-6478-4705-8df0-2a27c8780507-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 966.270345] env[63345]: DEBUG oslo_concurrency.lockutils [None 
req-bc8bacce-823e-4dbf-b8c4-cf24c91c47e6 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Lock "070a834d-6478-4705-8df0-2a27c8780507-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 966.272197] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4a948e2f-8352-429d-b9ec-8e81a8bb2619 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Lock "b3f20003-f75d-4d9f-bb4a-02d2930054a8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.791s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 966.272849] env[63345]: INFO nova.compute.manager [None req-bc8bacce-823e-4dbf-b8c4-cf24c91c47e6 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Terminating instance [ 966.436561] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7a025aa8-413d-4515-be61-b039c163c688 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Releasing lock "refresh_cache-0fe61754-458c-4c5c-bb2d-2677302e5fb9" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 966.436825] env[63345]: DEBUG nova.objects.instance [None req-7a025aa8-413d-4515-be61-b039c163c688 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Lazy-loading 'migration_context' on Instance uuid 0fe61754-458c-4c5c-bb2d-2677302e5fb9 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 966.621661] env[63345]: DEBUG oslo_vmware.api [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]522e7118-c6cb-9827-aa04-9a3060e744cb, 'name': SearchDatastore_Task, 'duration_secs': 0.022459} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.622483] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6614f682-eaa7-4a18-8f56-1129b1d353d3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.627688] env[63345]: DEBUG oslo_vmware.api [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for the task: (returnval){ [ 966.627688] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]5232b18e-dcdd-b075-972c-1736e6ed9941" [ 966.627688] env[63345]: _type = "Task" [ 966.627688] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.635273] env[63345]: DEBUG oslo_vmware.api [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5232b18e-dcdd-b075-972c-1736e6ed9941, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.715913] env[63345]: DEBUG oslo_concurrency.lockutils [None req-95865fd4-1899-48de-879c-1b4d5498289a tempest-ServersAdminTestJSON-620918024 tempest-ServersAdminTestJSON-620918024-project-member] Lock "3a85df04-3997-48a3-8992-f24fe997b3cc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.933s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 966.765356] env[63345]: DEBUG nova.network.neutron [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Successfully updated port: 267ad158-547a-4d3a-a838-3d964626d731 {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 966.777549] env[63345]: DEBUG nova.compute.manager [None req-bc8bacce-823e-4dbf-b8c4-cf24c91c47e6 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Start destroying the instance on the hypervisor. {{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 966.777938] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-bc8bacce-823e-4dbf-b8c4-cf24c91c47e6 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 966.779701] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8278d75d-340d-4a81-b8a6-63837ef662bd {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.789436] env[63345]: DEBUG nova.compute.manager [req-835cf7eb-536f-4bd1-b148-b75112d230b5 req-825a056b-4dc4-456d-8e00-1563c97699d9 service nova] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Received event network-changed-267ad158-547a-4d3a-a838-3d964626d731 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 966.789775] env[63345]: DEBUG nova.compute.manager [req-835cf7eb-536f-4bd1-b148-b75112d230b5 req-825a056b-4dc4-456d-8e00-1563c97699d9 service nova] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Refreshing instance network info cache due to event network-changed-267ad158-547a-4d3a-a838-3d964626d731. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 966.789929] env[63345]: DEBUG oslo_concurrency.lockutils [req-835cf7eb-536f-4bd1-b148-b75112d230b5 req-825a056b-4dc4-456d-8e00-1563c97699d9 service nova] Acquiring lock "refresh_cache-22a11cf9-8f85-4371-98eb-25b267c9aff7" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 966.790166] env[63345]: DEBUG oslo_concurrency.lockutils [req-835cf7eb-536f-4bd1-b148-b75112d230b5 req-825a056b-4dc4-456d-8e00-1563c97699d9 service nova] Acquired lock "refresh_cache-22a11cf9-8f85-4371-98eb-25b267c9aff7" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 966.790398] env[63345]: DEBUG nova.network.neutron [req-835cf7eb-536f-4bd1-b148-b75112d230b5 req-825a056b-4dc4-456d-8e00-1563c97699d9 service nova] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Refreshing network info cache for port 267ad158-547a-4d3a-a838-3d964626d731 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 966.794250] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc8bacce-823e-4dbf-b8c4-cf24c91c47e6 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 966.794700] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-672c5a76-b10b-4e37-8f99-18425618568a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.802433] env[63345]: DEBUG oslo_vmware.api [None req-bc8bacce-823e-4dbf-b8c4-cf24c91c47e6 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Waiting for the task: (returnval){ [ 966.802433] env[63345]: value = "task-1017564" [ 966.802433] env[63345]: _type = "Task" [ 966.802433] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.810539] env[63345]: DEBUG oslo_vmware.api [None req-bc8bacce-823e-4dbf-b8c4-cf24c91c47e6 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1017564, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.825029] env[63345]: DEBUG nova.compute.manager [None req-c8e84880-7ed8-4cdb-80fe-e1e030177dee tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 966.825800] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b04500e-e0d3-4b29-bcf4-cf85f636ab15 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.940560] env[63345]: DEBUG nova.objects.base [None req-7a025aa8-413d-4515-be61-b039c163c688 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Object Instance<0fe61754-458c-4c5c-bb2d-2677302e5fb9> lazy-loaded attributes: info_cache,migration_context {{(pid=63345) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 966.941642] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2638cda-dba2-451a-9b8c-795de612b248 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.964803] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e41d9c6d-6550-41ef-8e6e-744019726f4a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.971352] env[63345]: DEBUG oslo_vmware.api [None req-7a025aa8-413d-4515-be61-b039c163c688 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Waiting for the task: (returnval){ [ 966.971352] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52fe1ffc-bf19-0235-0923-e2c61c207f44" [ 966.971352] env[63345]: _type = "Task" [ 966.971352] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.981347] env[63345]: DEBUG oslo_vmware.api [None req-7a025aa8-413d-4515-be61-b039c163c688 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52fe1ffc-bf19-0235-0923-e2c61c207f44, 'name': SearchDatastore_Task, 'duration_secs': 0.007751} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.981550] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7a025aa8-413d-4515-be61-b039c163c688 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 967.138480] env[63345]: DEBUG oslo_vmware.api [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5232b18e-dcdd-b075-972c-1736e6ed9941, 'name': SearchDatastore_Task, 'duration_secs': 0.011013} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.138760] env[63345]: DEBUG oslo_concurrency.lockutils [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 967.139041] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 732ac30c-15c1-4c57-bb70-ea3ed51f646b/732ac30c-15c1-4c57-bb70-ea3ed51f646b.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 967.139319] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7d39f49f-0b18-48a4-8c92-3248954bc464 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.146693] env[63345]: DEBUG oslo_vmware.api [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for the task: (returnval){ [ 967.146693] env[63345]: value = "task-1017565" [ 967.146693] env[63345]: _type = "Task" [ 967.146693] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.154520] env[63345]: DEBUG oslo_vmware.api [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017565, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.268123] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Acquiring lock "refresh_cache-22a11cf9-8f85-4371-98eb-25b267c9aff7" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 967.319800] env[63345]: DEBUG oslo_vmware.api [None req-bc8bacce-823e-4dbf-b8c4-cf24c91c47e6 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1017564, 'name': PowerOffVM_Task, 'duration_secs': 0.23729} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.324446] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc8bacce-823e-4dbf-b8c4-cf24c91c47e6 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 967.325064] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-bc8bacce-823e-4dbf-b8c4-cf24c91c47e6 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 967.325833] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7700d6fb-12a1-4b78-acf9-501084a27e56 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.342294] env[63345]: INFO nova.compute.manager [None req-c8e84880-7ed8-4cdb-80fe-e1e030177dee tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] instance snapshotting [ 967.346514] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-972db93c-964c-4578-8088-160ce616600d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.350942] env[63345]: DEBUG nova.network.neutron [req-835cf7eb-536f-4bd1-b148-b75112d230b5 req-825a056b-4dc4-456d-8e00-1563c97699d9 service nova] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 967.380239] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85381a33-278d-4054-ba3f-de4c78aa6018 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.451334] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90ec035c-bdbc-4dc8-885d-064e0be003c3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.462455] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b99344bd-2f8e-47a4-94af-1bfbd337226c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.499014] env[63345]: DEBUG nova.network.neutron [req-835cf7eb-536f-4bd1-b148-b75112d230b5 req-825a056b-4dc4-456d-8e00-1563c97699d9 service nova] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 967.500929] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e28b5fb0-82ce-41f3-a6d4-fe22a15b0694 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.516102] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ede9d9ab-8886-41ca-9c3c-ce3279bb3855 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.534197] env[63345]: DEBUG nova.compute.provider_tree [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 967.657962] env[63345]: DEBUG oslo_vmware.api [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017565, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.49808} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.658263] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 732ac30c-15c1-4c57-bb70-ea3ed51f646b/732ac30c-15c1-4c57-bb70-ea3ed51f646b.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 967.658491] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 732ac30c-15c1-4c57-bb70-ea3ed51f646b] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 967.658787] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6d9e312e-4719-41c6-8157-f710cca4ac37 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.665740] env[63345]: DEBUG oslo_vmware.api [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for the task: (returnval){ [ 967.665740] env[63345]: value = "task-1017567" [ 967.665740] env[63345]: _type = "Task" [ 967.665740] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.675054] env[63345]: DEBUG oslo_vmware.api [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017567, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.892974] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-c8e84880-7ed8-4cdb-80fe-e1e030177dee tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] Creating Snapshot of the VM instance {{(pid=63345) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 967.892974] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-61e4ed36-e8f0-4828-b5c5-9073505db916 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.898716] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-bc8bacce-823e-4dbf-b8c4-cf24c91c47e6 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 967.898951] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-bc8bacce-823e-4dbf-b8c4-cf24c91c47e6 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 967.899164] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc8bacce-823e-4dbf-b8c4-cf24c91c47e6 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Deleting the datastore file [datastore2] 070a834d-6478-4705-8df0-2a27c8780507 {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 967.899434] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-285e3f33-9f8c-471a-8485-ff2b99d47580 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.903393] env[63345]: DEBUG oslo_vmware.api [None req-c8e84880-7ed8-4cdb-80fe-e1e030177dee tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Waiting for the task: (returnval){ [ 967.903393] env[63345]: value = "task-1017568" [ 967.903393] env[63345]: _type = "Task" [ 967.903393] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.907791] env[63345]: DEBUG oslo_vmware.api [None req-bc8bacce-823e-4dbf-b8c4-cf24c91c47e6 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Waiting for the task: (returnval){ [ 967.907791] env[63345]: value = "task-1017569" [ 967.907791] env[63345]: _type = "Task" [ 967.907791] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.914986] env[63345]: DEBUG oslo_vmware.api [None req-c8e84880-7ed8-4cdb-80fe-e1e030177dee tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Task: {'id': task-1017568, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.922244] env[63345]: DEBUG oslo_vmware.api [None req-bc8bacce-823e-4dbf-b8c4-cf24c91c47e6 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1017569, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.005839] env[63345]: DEBUG oslo_concurrency.lockutils [req-835cf7eb-536f-4bd1-b148-b75112d230b5 req-825a056b-4dc4-456d-8e00-1563c97699d9 service nova] Releasing lock "refresh_cache-22a11cf9-8f85-4371-98eb-25b267c9aff7" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 968.006237] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Acquired lock "refresh_cache-22a11cf9-8f85-4371-98eb-25b267c9aff7" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 968.006493] env[63345]: DEBUG nova.network.neutron [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 968.037509] env[63345]: DEBUG nova.virt.hardware [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 968.037787] env[63345]: DEBUG nova.virt.hardware [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 968.037927] env[63345]: DEBUG nova.virt.hardware [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 968.038157] env[63345]: DEBUG nova.virt.hardware [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 968.038514] 
env[63345]: DEBUG nova.virt.hardware [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 968.038514] env[63345]: DEBUG nova.virt.hardware [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 968.038680] env[63345]: DEBUG nova.virt.hardware [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 968.038848] env[63345]: DEBUG nova.virt.hardware [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 968.039035] env[63345]: DEBUG nova.virt.hardware [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 968.039242] env[63345]: DEBUG nova.virt.hardware [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 968.039533] env[63345]: DEBUG nova.virt.hardware [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 968.040441] env[63345]: DEBUG nova.scheduler.client.report [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 968.044573] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a683e11-3024-4308-adf9-b663ae810d6c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.057119] env[63345]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ace0f02b-6923-4da0-8d9e-7557ff926af4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.077280] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:78:ff:28', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '68add7d6-c025-46fa-84d3-9c589adb63e4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '277d5619-4a4c-4f02-9ce7-786f57c7dc46', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 968.085673] env[63345]: DEBUG oslo.service.loopingcall [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 968.086394] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 968.086667] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8240c2cf-5f45-4814-a0ab-a829463d6e80 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.107985] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 968.107985] env[63345]: value = "task-1017570" [ 968.107985] env[63345]: _type = "Task" [ 968.107985] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.116434] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017570, 'name': CreateVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.175402] env[63345]: DEBUG oslo_vmware.api [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017567, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072907} completed successfully. 
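(Editorial aside.) The records above show the driver submitting a vSphere task (here CreateVM_Task) and then logging "progress is N%" while oslo.vmware waits on it. A minimal sketch of that wait-and-poll pattern, assuming a caller-supplied get_task_info() callable rather than the real oslo.vmware internals:

```python
# Illustrative sketch only -- not the oslo.vmware implementation. It mimics the
# wait_for_task/_poll_task behaviour visible in the surrounding log: poll the
# task state, report progress, and return (or raise) once the task is terminal.
import time

def wait_for_task(get_task_info, poll_interval=0.5, timeout=300.0):
    """get_task_info() is assumed to return a dict such as
    {'state': 'running', 'progress': 42} -- a stand-in for the real TaskInfo."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()
        state = info.get('state')
        if state == 'success':
            return info.get('result')
        if state == 'error':
            raise RuntimeError(f"task failed: {info.get('error')}")
        # Corresponds to the repeated "Task: {...} progress is N%." DEBUG lines.
        print(f"progress is {info.get('progress', 0)}%")
        time.sleep(poll_interval)
    raise TimeoutError("task did not complete before the timeout")
```

Once the task reports success the flow continues with the next step (here extending and attaching the root disk), which is why completed task lines above carry a duration_secs field.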
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.175676] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 732ac30c-15c1-4c57-bb70-ea3ed51f646b] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 968.176481] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ac2bc06-33d6-4086-a948-3972eddcc2ce {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.199861] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 732ac30c-15c1-4c57-bb70-ea3ed51f646b] Reconfiguring VM instance instance-0000005d to attach disk [datastore2] 732ac30c-15c1-4c57-bb70-ea3ed51f646b/732ac30c-15c1-4c57-bb70-ea3ed51f646b.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 968.200234] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a9747a79-d22d-4198-8181-daecbff0922c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.223441] env[63345]: DEBUG oslo_vmware.api [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for the task: (returnval){ [ 968.223441] env[63345]: value = "task-1017571" [ 968.223441] env[63345]: _type = "Task" [ 968.223441] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.240711] env[63345]: DEBUG oslo_vmware.api [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017571, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.416502] env[63345]: DEBUG oslo_vmware.api [None req-c8e84880-7ed8-4cdb-80fe-e1e030177dee tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Task: {'id': task-1017568, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.422443] env[63345]: DEBUG oslo_vmware.api [None req-bc8bacce-823e-4dbf-b8c4-cf24c91c47e6 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Task: {'id': task-1017569, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.138305} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.422704] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc8bacce-823e-4dbf-b8c4-cf24c91c47e6 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 968.422909] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-bc8bacce-823e-4dbf-b8c4-cf24c91c47e6 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 968.423128] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-bc8bacce-823e-4dbf-b8c4-cf24c91c47e6 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 968.423328] env[63345]: INFO nova.compute.manager [None req-bc8bacce-823e-4dbf-b8c4-cf24c91c47e6 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Took 1.65 seconds to destroy the instance on the hypervisor. [ 968.423573] env[63345]: DEBUG oslo.service.loopingcall [None req-bc8bacce-823e-4dbf-b8c4-cf24c91c47e6 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 968.423766] env[63345]: DEBUG nova.compute.manager [-] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 968.423860] env[63345]: DEBUG nova.network.neutron [-] [instance: 070a834d-6478-4705-8df0-2a27c8780507] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 968.555042] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.375s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 968.555042] env[63345]: DEBUG nova.compute.manager [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Start building networks asynchronously for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 968.559321] env[63345]: DEBUG nova.network.neutron [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 968.562030] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7f7d0aef-0d1c-4b09-a138-f48e4d7b60ea tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.836s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 968.562541] env[63345]: DEBUG nova.objects.instance [None req-7f7d0aef-0d1c-4b09-a138-f48e4d7b60ea tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Lazy-loading 'resources' on Instance uuid c84c8b9a-9164-4dd7-b094-dd09c15c6f21 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 968.624525] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017570, 'name': CreateVM_Task} progress is 99%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.736951] env[63345]: DEBUG oslo_vmware.api [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017571, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.756627] env[63345]: DEBUG nova.network.neutron [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Updating instance_info_cache with network_info: [{"id": "267ad158-547a-4d3a-a838-3d964626d731", "address": "fa:16:3e:9e:ba:8d", "network": {"id": "13df4553-212e-4adb-8de0-da1acdf99671", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-238696814-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4560e378b6aa47a3bbb5a2f7c5b76f5f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "90328c7b-15c4-4742-805b-755248d67029", "external-id": "nsx-vlan-transportzone-860", "segmentation_id": 860, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap267ad158-54", "ovs_interfaceid": "267ad158-547a-4d3a-a838-3d964626d731", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 968.915409] env[63345]: DEBUG oslo_vmware.api [None req-c8e84880-7ed8-4cdb-80fe-e1e030177dee tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Task: {'id': task-1017568, 'name': CreateSnapshot_Task} progress is 100%. 
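(Editorial aside.) The instance_info_cache update above carries the full Neutron port payload for port 267ad158-547a-4d3a-a838-3d964626d731. Purely as an illustration of that structure (plain dict/list access, not a Nova API), the fixed IP, MAC and OVS device name can be read out like this:

```python
# Illustrative only: the structure below is copied (trimmed) from the
# network_info payload in the cache-update record above.
network_info = [{
    "id": "267ad158-547a-4d3a-a838-3d964626d731",
    "address": "fa:16:3e:9e:ba:8d",
    "devname": "tap267ad158-54",
    "type": "ovs",
    "network": {
        "id": "13df4553-212e-4adb-8de0-da1acdf99671",
        "bridge": "br-int",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4}],
        }],
    },
}]

for vif in network_info:
    fixed_ips = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"] if ip["type"] == "fixed"]
    print(vif["id"], vif["address"], vif["devname"], fixed_ips)
    # 267ad158-... fa:16:3e:9e:ba:8d tap267ad158-54 ['192.168.128.3']
```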
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.002202] env[63345]: DEBUG nova.compute.manager [req-fb13ebb0-39e3-4f15-8ca1-afc254b51879 req-11decbd2-e218-45bc-b2ea-b58322ec84bc service nova] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Received event network-vif-deleted-cf06de95-5747-4226-b66c-b9ccca47321d {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 969.002483] env[63345]: INFO nova.compute.manager [req-fb13ebb0-39e3-4f15-8ca1-afc254b51879 req-11decbd2-e218-45bc-b2ea-b58322ec84bc service nova] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Neutron deleted interface cf06de95-5747-4226-b66c-b9ccca47321d; detaching it from the instance and deleting it from the info cache [ 969.002718] env[63345]: DEBUG nova.network.neutron [req-fb13ebb0-39e3-4f15-8ca1-afc254b51879 req-11decbd2-e218-45bc-b2ea-b58322ec84bc service nova] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 969.066705] env[63345]: DEBUG nova.compute.utils [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 969.071202] env[63345]: DEBUG nova.compute.manager [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Allocating IP information in the background. {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 969.071421] env[63345]: DEBUG nova.network.neutron [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 969.125354] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017570, 'name': CreateVM_Task, 'duration_secs': 0.645145} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.127098] env[63345]: DEBUG nova.policy [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e36fd04030444217acadbbf4e4fe9be0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '33c28bfca4da460e8ca96dc7519204c8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 969.128536] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 969.131438] env[63345]: DEBUG oslo_concurrency.lockutils [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 969.131637] env[63345]: DEBUG oslo_concurrency.lockutils [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 969.132841] env[63345]: DEBUG oslo_concurrency.lockutils [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 969.132841] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e6a76024-9e27-482b-aa19-d40893d7dc50 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.138334] env[63345]: DEBUG oslo_vmware.api [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 969.138334] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]5294a50b-eada-2ae2-79f3-33692afe5a93" [ 969.138334] env[63345]: _type = "Task" [ 969.138334] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.147564] env[63345]: DEBUG oslo_vmware.api [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5294a50b-eada-2ae2-79f3-33692afe5a93, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.239026] env[63345]: DEBUG oslo_vmware.api [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017571, 'name': ReconfigVM_Task, 'duration_secs': 0.790924} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.242196] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 732ac30c-15c1-4c57-bb70-ea3ed51f646b] Reconfigured VM instance instance-0000005d to attach disk [datastore2] 732ac30c-15c1-4c57-bb70-ea3ed51f646b/732ac30c-15c1-4c57-bb70-ea3ed51f646b.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 969.242929] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3620d8a7-9928-4919-9fa5-5dbb015bf29f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.252644] env[63345]: DEBUG oslo_vmware.api [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for the task: (returnval){ [ 969.252644] env[63345]: value = "task-1017572" [ 969.252644] env[63345]: _type = "Task" [ 969.252644] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.261950] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Releasing lock "refresh_cache-22a11cf9-8f85-4371-98eb-25b267c9aff7" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 969.262405] env[63345]: DEBUG nova.compute.manager [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Instance network_info: |[{"id": "267ad158-547a-4d3a-a838-3d964626d731", "address": "fa:16:3e:9e:ba:8d", "network": {"id": "13df4553-212e-4adb-8de0-da1acdf99671", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-238696814-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4560e378b6aa47a3bbb5a2f7c5b76f5f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "90328c7b-15c4-4742-805b-755248d67029", "external-id": "nsx-vlan-transportzone-860", "segmentation_id": 860, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap267ad158-54", "ovs_interfaceid": "267ad158-547a-4d3a-a838-3d964626d731", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": 
{}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 969.262718] env[63345]: DEBUG oslo_vmware.api [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017572, 'name': Rename_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.265485] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9e:ba:8d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '90328c7b-15c4-4742-805b-755248d67029', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '267ad158-547a-4d3a-a838-3d964626d731', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 969.273327] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Creating folder: Project (4560e378b6aa47a3bbb5a2f7c5b76f5f). Parent ref: group-v225918. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 969.274227] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b6c8ec78-e9a7-4316-aeb5-7e5a509b7c8a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.287121] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Created folder: Project (4560e378b6aa47a3bbb5a2f7c5b76f5f) in parent group-v225918. [ 969.287413] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Creating folder: Instances. Parent ref: group-v226123. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 969.290702] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-17e8196a-533a-400f-88c2-ea54f7747854 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.303207] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Created folder: Instances in parent group-v226123. [ 969.303367] env[63345]: DEBUG oslo.service.loopingcall [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 969.303584] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 969.306280] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1cd4f9f9-0090-4e57-b6fd-14b7d79bd4c3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.327806] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 969.327806] env[63345]: value = "task-1017575" [ 969.327806] env[63345]: _type = "Task" [ 969.327806] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.342118] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017575, 'name': CreateVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.347262] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b1bb893-ca79-4a25-96ec-838d6ab03fe1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.355010] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d57fd4f-ddde-4cb1-89d2-5ff2ecb2035b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.393056] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-032a457f-0dab-4487-b27a-0fb53f8a6f29 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.402060] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dda53d67-adbc-4ec3-af6a-45f38a21148f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.416367] env[63345]: DEBUG oslo_vmware.api [None req-c8e84880-7ed8-4cdb-80fe-e1e030177dee tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Task: {'id': task-1017568, 'name': CreateSnapshot_Task, 'duration_secs': 1.307548} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.424601] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-c8e84880-7ed8-4cdb-80fe-e1e030177dee tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] Created Snapshot of the VM instance {{(pid=63345) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 969.425334] env[63345]: DEBUG nova.compute.provider_tree [None req-7f7d0aef-0d1c-4b09-a138-f48e4d7b60ea tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 969.429716] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c48f4caf-a8fe-4351-92f4-e7b9b7d45e34 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.436051] env[63345]: DEBUG nova.network.neutron [-] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 969.463733] env[63345]: DEBUG nova.network.neutron [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Successfully created port: 8a3e5f64-f812-4c1b-a9e0-b8b3146a1467 {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 969.505917] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-69feaf5d-cbfd-46b6-a390-a8c142d859d6 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.518230] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecb384f0-cf93-476e-9d36-2b6f056275d3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.553687] env[63345]: DEBUG nova.compute.manager [req-fb13ebb0-39e3-4f15-8ca1-afc254b51879 req-11decbd2-e218-45bc-b2ea-b58322ec84bc service nova] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Detach interface failed, port_id=cf06de95-5747-4226-b66c-b9ccca47321d, reason: Instance 070a834d-6478-4705-8df0-2a27c8780507 could not be found. {{(pid=63345) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 969.572398] env[63345]: DEBUG nova.compute.manager [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Start building block device mappings for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 969.648993] env[63345]: DEBUG oslo_vmware.api [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5294a50b-eada-2ae2-79f3-33692afe5a93, 'name': SearchDatastore_Task, 'duration_secs': 0.011229} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.649337] env[63345]: DEBUG oslo_concurrency.lockutils [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 969.649580] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 969.649821] env[63345]: DEBUG oslo_concurrency.lockutils [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 969.649973] env[63345]: DEBUG oslo_concurrency.lockutils [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 969.650173] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 969.650454] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cf59a185-d071-4f01-a472-04a7eafda4f4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.661680] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 969.661904] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Folder [datastore2] devstack-image-cache_base created. 
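(Editorial aside.) The lock / MakeDirectory / SearchDatastore sequence above is the image-cache check: the driver serializes on the cached VMDK path, confirms whether 2ff49e1b-8f44-4332-bba9-777d55ff62c4 already sits in devstack-image-cache_base, and only downloads it when missing. A rough sketch of that pattern, with search_datastore and fetch_from_glance as hypothetical stand-ins for the real SearchDatastore_Task and Glance download:

```python
# Rough sketch of the fetch-if-missing flow traced in the log above; not Nova code.
import threading
from contextlib import contextmanager

_locks: dict[str, threading.Lock] = {}

@contextmanager
def cache_lock(path):
    # Mirrors the "Acquiring/Acquired/Releasing lock" lines keyed on the VMDK path.
    lock = _locks.setdefault(path, threading.Lock())
    with lock:
        yield

def fetch_image_if_missing(image_id, search_datastore, fetch_from_glance):
    cached = f"[datastore2] devstack-image-cache_base/{image_id}/{image_id}.vmdk"
    with cache_lock(cached):
        if not search_datastore(cached):         # SearchDatastore_Task analogue
            fetch_from_glance(image_id, cached)  # only the first boot pays the download
    return cached
```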
{{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 969.662913] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c4edbb7b-7a11-443d-95b2-968287cf5cec {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.673857] env[63345]: DEBUG oslo_vmware.api [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 969.673857] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]529f4c85-f1fa-7b30-719a-c0fc56ed4326" [ 969.673857] env[63345]: _type = "Task" [ 969.673857] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.682796] env[63345]: DEBUG oslo_vmware.api [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]529f4c85-f1fa-7b30-719a-c0fc56ed4326, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.765054] env[63345]: DEBUG oslo_vmware.api [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017572, 'name': Rename_Task, 'duration_secs': 0.203144} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.765372] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 732ac30c-15c1-4c57-bb70-ea3ed51f646b] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 969.765632] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c00a8d72-371e-44a8-a85f-d40a5cb3f491 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.774192] env[63345]: DEBUG oslo_vmware.api [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for the task: (returnval){ [ 969.774192] env[63345]: value = "task-1017576" [ 969.774192] env[63345]: _type = "Task" [ 969.774192] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.789760] env[63345]: DEBUG oslo_vmware.api [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017576, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.842367] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017575, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.934021] env[63345]: DEBUG nova.scheduler.client.report [None req-7f7d0aef-0d1c-4b09-a138-f48e4d7b60ea tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 969.940700] env[63345]: INFO nova.compute.manager [-] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Took 1.52 seconds to deallocate network for instance. [ 969.948180] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-c8e84880-7ed8-4cdb-80fe-e1e030177dee tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] Creating linked-clone VM from snapshot {{(pid=63345) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 969.950675] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-5a1bb416-eadd-466f-b13c-cd20c3145b1f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.965662] env[63345]: DEBUG oslo_vmware.api [None req-c8e84880-7ed8-4cdb-80fe-e1e030177dee tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Waiting for the task: (returnval){ [ 969.965662] env[63345]: value = "task-1017577" [ 969.965662] env[63345]: _type = "Task" [ 969.965662] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.977609] env[63345]: DEBUG oslo_vmware.api [None req-c8e84880-7ed8-4cdb-80fe-e1e030177dee tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Task: {'id': task-1017577, 'name': CloneVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.185275] env[63345]: DEBUG oslo_vmware.api [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]529f4c85-f1fa-7b30-719a-c0fc56ed4326, 'name': SearchDatastore_Task, 'duration_secs': 0.029992} completed successfully. 
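(Editorial aside.) The inventory payload logged above is what the scheduler report client compares against placement. As a quick, simplified illustration of what those numbers mean, usable capacity is roughly (total - reserved) * allocation_ratio, with max_unit additionally capping what a single instance may consume:

```python
# Simplified illustration of the inventory record in the line above; the exact
# placement accounting also involves step_size and existing allocations.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 187},
}

for rc, inv in inventory.items():
    usable = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: usable={usable:g}, per-instance max={inv['max_unit']}")
# VCPU: usable=192, MEMORY_MB: usable=196078, DISK_GB: usable=400
```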
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.186109] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-62dccf9f-2084-40d6-b8ee-60e42e92a83c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.192511] env[63345]: DEBUG oslo_vmware.api [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 970.192511] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52e3fe5c-c5bc-c093-26be-ec300e77a30b" [ 970.192511] env[63345]: _type = "Task" [ 970.192511] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.201477] env[63345]: DEBUG oslo_vmware.api [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52e3fe5c-c5bc-c093-26be-ec300e77a30b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.285577] env[63345]: DEBUG oslo_vmware.api [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017576, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.340272] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017575, 'name': CreateVM_Task, 'duration_secs': 0.514224} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.340432] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 970.341358] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 970.341358] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 970.341611] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 970.341880] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-481f891e-80a0-429d-adb2-9c0fe8bbc67b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.347821] env[63345]: DEBUG oslo_vmware.api [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Waiting for the task: (returnval){ [ 970.347821] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]5200b467-afbc-36be-5d45-da3d081b658d" [ 970.347821] env[63345]: _type = "Task" [ 970.347821] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.357494] env[63345]: DEBUG oslo_vmware.api [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5200b467-afbc-36be-5d45-da3d081b658d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.437054] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7f7d0aef-0d1c-4b09-a138-f48e4d7b60ea tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.875s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 970.440424] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7a025aa8-413d-4515-be61-b039c163c688 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 3.459s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 970.460523] env[63345]: DEBUG oslo_concurrency.lockutils [None req-bc8bacce-823e-4dbf-b8c4-cf24c91c47e6 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 970.464205] env[63345]: INFO nova.scheduler.client.report [None req-7f7d0aef-0d1c-4b09-a138-f48e4d7b60ea tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Deleted allocations for instance c84c8b9a-9164-4dd7-b094-dd09c15c6f21 [ 970.478706] env[63345]: DEBUG oslo_vmware.api [None req-c8e84880-7ed8-4cdb-80fe-e1e030177dee tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Task: {'id': task-1017577, 'name': CloneVM_Task} progress is 94%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.587113] env[63345]: DEBUG nova.compute.manager [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Start spawning the instance on the hypervisor. 
{{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 970.617937] env[63345]: DEBUG nova.virt.hardware [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 970.618551] env[63345]: DEBUG nova.virt.hardware [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 970.618551] env[63345]: DEBUG nova.virt.hardware [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 970.618696] env[63345]: DEBUG nova.virt.hardware [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 970.618748] env[63345]: DEBUG nova.virt.hardware [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 970.618914] env[63345]: DEBUG nova.virt.hardware [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 970.619649] env[63345]: DEBUG nova.virt.hardware [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 970.620476] env[63345]: DEBUG nova.virt.hardware [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 970.620476] 
env[63345]: DEBUG nova.virt.hardware [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 970.622296] env[63345]: DEBUG nova.virt.hardware [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 970.622296] env[63345]: DEBUG nova.virt.hardware [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 970.622473] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da825bdb-720b-4f17-a25f-954a0d852b5d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.632076] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d21fb93b-ca0c-4f47-b95e-7567240e4eda {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.703464] env[63345]: DEBUG oslo_vmware.api [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52e3fe5c-c5bc-c093-26be-ec300e77a30b, 'name': SearchDatastore_Task, 'duration_secs': 0.017475} completed successfully. 
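(Editorial aside.) The nova.virt.hardware lines above (flavor/image limits 0:0:0, "Build topologies for 1 vcpu(s) 1:1:1", "Got 1 possible topologies") record Nova enumerating sockets x cores x threads combinations that multiply out to the flavor's vCPU count. A much-simplified sketch of that enumeration, not the real nova.virt.hardware code (which also handles NUMA and many more constraints):

```python
# Much-simplified sketch of the topology enumeration the log records describe.
def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Return (sockets, cores, threads) triples whose product equals vcpus."""
    found = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        per_socket = vcpus // sockets
        for cores in range(1, min(per_socket, max_cores) + 1):
            if per_socket % cores:
                continue
            threads = per_socket // cores
            if threads <= max_threads:
                found.append((sockets, cores, threads))
    return found

print(possible_topologies(1))  # [(1, 1, 1)] -- matches "Got 1 possible topologies"
```

For the m1.nano flavor (vcpus=1) with the default 65536 limits, the only candidate is sockets=1, cores=1, threads=1, which is exactly the single sorted topology logged above.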
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.703758] env[63345]: DEBUG oslo_concurrency.lockutils [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 970.704022] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 4868a0a0-ca35-44b0-a90c-124aa366af76/4868a0a0-ca35-44b0-a90c-124aa366af76.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 970.704298] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4057f48e-aba7-48e5-8181-7fc56c6b885c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.712376] env[63345]: DEBUG oslo_vmware.api [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 970.712376] env[63345]: value = "task-1017578" [ 970.712376] env[63345]: _type = "Task" [ 970.712376] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.723286] env[63345]: DEBUG oslo_vmware.api [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017578, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.785927] env[63345]: DEBUG oslo_vmware.api [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017576, 'name': PowerOnVM_Task, 'duration_secs': 0.669401} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.786248] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 732ac30c-15c1-4c57-bb70-ea3ed51f646b] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 970.786513] env[63345]: INFO nova.compute.manager [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 732ac30c-15c1-4c57-bb70-ea3ed51f646b] Took 9.54 seconds to spawn the instance on the hypervisor. 
[ 970.786763] env[63345]: DEBUG nova.compute.manager [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 732ac30c-15c1-4c57-bb70-ea3ed51f646b] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 970.787589] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7095f084-c74a-4c2b-8fa8-5163b14fb1f9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.859638] env[63345]: DEBUG oslo_vmware.api [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5200b467-afbc-36be-5d45-da3d081b658d, 'name': SearchDatastore_Task, 'duration_secs': 0.010922} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.860256] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 970.860525] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 970.860774] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 970.860932] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 970.861138] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 970.861405] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ee14411f-9f10-4637-9e16-0d8d4117c890 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.870759] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Created 
directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 970.870874] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 970.871573] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a2704bc8-4399-4031-a42d-5b198a90b5e6 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.876948] env[63345]: DEBUG oslo_vmware.api [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Waiting for the task: (returnval){ [ 970.876948] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]5285ef3d-c8fe-85c0-35b2-f19558ec8a6b" [ 970.876948] env[63345]: _type = "Task" [ 970.876948] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.885366] env[63345]: DEBUG oslo_vmware.api [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5285ef3d-c8fe-85c0-35b2-f19558ec8a6b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.983440] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7f7d0aef-0d1c-4b09-a138-f48e4d7b60ea tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Lock "c84c8b9a-9164-4dd7-b094-dd09c15c6f21" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.214s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 970.991018] env[63345]: DEBUG oslo_vmware.api [None req-c8e84880-7ed8-4cdb-80fe-e1e030177dee tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Task: {'id': task-1017577, 'name': CloneVM_Task} progress is 94%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.203216] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cce82bcb-0476-4910-b63e-6090a591871c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.212986] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-931f7a4e-c38a-4278-aa28-6d238f812328 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.228692] env[63345]: DEBUG oslo_vmware.api [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017578, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.262226] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb83336b-083d-4e74-8edc-952972875333 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.272154] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-304e4825-b8ae-4e01-80c3-1ee41f0d435d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.292530] env[63345]: DEBUG nova.compute.provider_tree [None req-7a025aa8-413d-4515-be61-b039c163c688 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 971.311204] env[63345]: INFO nova.compute.manager [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 732ac30c-15c1-4c57-bb70-ea3ed51f646b] Took 15.58 seconds to build instance. [ 971.384298] env[63345]: DEBUG nova.network.neutron [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Successfully updated port: 8a3e5f64-f812-4c1b-a9e0-b8b3146a1467 {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 971.394066] env[63345]: DEBUG oslo_vmware.api [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5285ef3d-c8fe-85c0-35b2-f19558ec8a6b, 'name': SearchDatastore_Task, 'duration_secs': 0.011393} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.394066] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-66e3128d-16d8-42bb-bfcc-88b5285db7ba {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.400780] env[63345]: DEBUG oslo_vmware.api [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Waiting for the task: (returnval){ [ 971.400780] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]521c5144-9dd4-1509-16cf-e768484ab5e0" [ 971.400780] env[63345]: _type = "Task" [ 971.400780] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.411512] env[63345]: DEBUG oslo_vmware.api [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]521c5144-9dd4-1509-16cf-e768484ab5e0, 'name': SearchDatastore_Task} progress is 0%. 
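The entries around here repeat one pattern: a vSphere call returns a Task (or a session-scoped task key such as SearchDatastore_Task), and the service polls it, logging "progress is N%" until "completed successfully" with a duration. Below is a minimal, self-contained sketch of that poll loop; `get_task_info`, the state names, and the timeout are illustrative stand-ins, not the oslo.vmware internals that actually produce these lines (the log points at `wait_for_task` / `_poll_task` in oslo_vmware/api.py).

```python
import time
from typing import Callable, Mapping

# Illustrative terminal states; the real TaskInfo object exposes
# queued/running/success/error states.
TERMINAL_STATES = {"success", "error"}


def wait_for_task(get_task_info: Callable[[], Mapping],
                  poll_interval: float = 0.5,
                  timeout: float = 300.0) -> Mapping:
    """Poll a task until it reaches a terminal state, mirroring the
    'progress is N%' / 'completed successfully' entries in the log."""
    deadline = time.monotonic() + timeout
    while True:
        info = get_task_info()                 # one property-collector round trip
        state = info.get("state", "running")
        if state in TERMINAL_STATES:
            if state == "error":
                raise RuntimeError(f"task failed: {info.get('error')}")
            return info                        # e.g. carries a 'duration_secs'-style figure
        if time.monotonic() > deadline:
            raise TimeoutError("task did not complete in time")
        time.sleep(poll_interval)              # poll on a short, fixed interval
```

The `duration_secs` values printed on completion above are what such a loop reports once the task leaves the running state.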
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.478093] env[63345]: DEBUG oslo_vmware.api [None req-c8e84880-7ed8-4cdb-80fe-e1e030177dee tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Task: {'id': task-1017577, 'name': CloneVM_Task, 'duration_secs': 1.504788} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.478447] env[63345]: INFO nova.virt.vmwareapi.vmops [None req-c8e84880-7ed8-4cdb-80fe-e1e030177dee tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] Created linked-clone VM from snapshot [ 971.479071] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8cfe28f-eda5-40b8-8906-fea3a404374b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.487334] env[63345]: DEBUG nova.virt.vmwareapi.images [None req-c8e84880-7ed8-4cdb-80fe-e1e030177dee tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] Uploading image 6b5e6190-95b5-4c2f-bc9f-f057f14d2ec4 {{(pid=63345) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 971.510858] env[63345]: DEBUG oslo_vmware.rw_handles [None req-c8e84880-7ed8-4cdb-80fe-e1e030177dee tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 971.510858] env[63345]: value = "vm-226126" [ 971.510858] env[63345]: _type = "VirtualMachine" [ 971.510858] env[63345]: }. {{(pid=63345) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 971.511308] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-0276c3f4-466a-4aef-825a-ceec45563e65 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.519505] env[63345]: DEBUG oslo_vmware.rw_handles [None req-c8e84880-7ed8-4cdb-80fe-e1e030177dee tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Lease: (returnval){ [ 971.519505] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]520a241c-b1ed-88d0-b99b-0540479c56bd" [ 971.519505] env[63345]: _type = "HttpNfcLease" [ 971.519505] env[63345]: } obtained for exporting VM: (result){ [ 971.519505] env[63345]: value = "vm-226126" [ 971.519505] env[63345]: _type = "VirtualMachine" [ 971.519505] env[63345]: }. {{(pid=63345) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 971.519847] env[63345]: DEBUG oslo_vmware.api [None req-c8e84880-7ed8-4cdb-80fe-e1e030177dee tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Waiting for the lease: (returnval){ [ 971.519847] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]520a241c-b1ed-88d0-b99b-0540479c56bd" [ 971.519847] env[63345]: _type = "HttpNfcLease" [ 971.519847] env[63345]: } to be ready. 
{{(pid=63345) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 971.526710] env[63345]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 971.526710] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]520a241c-b1ed-88d0-b99b-0540479c56bd" [ 971.526710] env[63345]: _type = "HttpNfcLease" [ 971.526710] env[63345]: } is initializing. {{(pid=63345) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 971.700936] env[63345]: DEBUG nova.compute.manager [req-761f341d-e315-4313-a733-0492b3570893 req-7874eaf3-fe96-4e25-bfa4-457b38ac925d service nova] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Received event network-vif-plugged-8a3e5f64-f812-4c1b-a9e0-b8b3146a1467 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 971.701213] env[63345]: DEBUG oslo_concurrency.lockutils [req-761f341d-e315-4313-a733-0492b3570893 req-7874eaf3-fe96-4e25-bfa4-457b38ac925d service nova] Acquiring lock "dd624e54-bd5b-4660-88a1-9d6f36560421-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 971.701697] env[63345]: DEBUG oslo_concurrency.lockutils [req-761f341d-e315-4313-a733-0492b3570893 req-7874eaf3-fe96-4e25-bfa4-457b38ac925d service nova] Lock "dd624e54-bd5b-4660-88a1-9d6f36560421-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 971.701853] env[63345]: DEBUG oslo_concurrency.lockutils [req-761f341d-e315-4313-a733-0492b3570893 req-7874eaf3-fe96-4e25-bfa4-457b38ac925d service nova] Lock "dd624e54-bd5b-4660-88a1-9d6f36560421-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 971.702402] env[63345]: DEBUG nova.compute.manager [req-761f341d-e315-4313-a733-0492b3570893 req-7874eaf3-fe96-4e25-bfa4-457b38ac925d service nova] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] No waiting events found dispatching network-vif-plugged-8a3e5f64-f812-4c1b-a9e0-b8b3146a1467 {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 971.702624] env[63345]: WARNING nova.compute.manager [req-761f341d-e315-4313-a733-0492b3570893 req-7874eaf3-fe96-4e25-bfa4-457b38ac925d service nova] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Received unexpected event network-vif-plugged-8a3e5f64-f812-4c1b-a9e0-b8b3146a1467 for instance with vm_state building and task_state spawning. [ 971.728370] env[63345]: DEBUG oslo_vmware.api [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017578, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.631084} completed successfully. 
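The req-761f341d entries just above trace Nova's external-event path: Neutron reports network-vif-plugged, the manager takes the per-instance "<uuid>-events" lock, looks for a waiter registered for that event, and logs a WARNING when nothing is waiting yet (the instance is still building). The sketch below reproduces only that dispatch shape with a plain dict and a threading lock; `InstanceEventBus` and its method names are made up for illustration, not Nova's classes.

```python
import logging
import threading
from concurrent.futures import Future

LOG = logging.getLogger(__name__)


class InstanceEventBus:
    """Toy per-instance event dispatch: waiters register for a named event;
    external notifications either complete a waiter or are logged as
    unexpected, as in the WARNING entry above."""

    def __init__(self) -> None:
        self._lock = threading.Lock()          # stands in for the "<uuid>-events" lock
        self._waiters: dict[tuple[str, str], Future] = {}

    def prepare_for_event(self, instance_uuid: str, event_name: str) -> Future:
        fut: Future = Future()
        with self._lock:
            self._waiters[(instance_uuid, event_name)] = fut
        return fut

    def pop_instance_event(self, instance_uuid: str, event_name: str) -> None:
        with self._lock:                       # acquired/released, as logged above
            fut = self._waiters.pop((instance_uuid, event_name), None)
        if fut is None:
            LOG.warning("Received unexpected event %s for instance %s",
                        event_name, instance_uuid)
        else:
            fut.set_result(event_name)
```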
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.728954] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 4868a0a0-ca35-44b0-a90c-124aa366af76/4868a0a0-ca35-44b0-a90c-124aa366af76.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 971.728954] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 971.729235] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b62170a4-86f4-4df8-83b3-01d78b2ba5c0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.738338] env[63345]: DEBUG oslo_vmware.api [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 971.738338] env[63345]: value = "task-1017580" [ 971.738338] env[63345]: _type = "Task" [ 971.738338] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.747334] env[63345]: DEBUG oslo_vmware.api [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017580, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.802802] env[63345]: DEBUG nova.scheduler.client.report [None req-7a025aa8-413d-4515-be61-b039c163c688 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 971.814245] env[63345]: DEBUG oslo_concurrency.lockutils [None req-68a599ed-6235-45ef-bd7d-d55f9ea3017d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lock "732ac30c-15c1-4c57-bb70-ea3ed51f646b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.094s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 971.891835] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquiring lock "refresh_cache-dd624e54-bd5b-4660-88a1-9d6f36560421" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 971.892069] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquired lock "refresh_cache-dd624e54-bd5b-4660-88a1-9d6f36560421" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 971.892251] env[63345]: DEBUG nova.network.neutron [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 971.913313] env[63345]: DEBUG oslo_vmware.api [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]521c5144-9dd4-1509-16cf-e768484ab5e0, 'name': SearchDatastore_Task, 'duration_secs': 0.014038} completed successfully. 
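The lockutils entries throughout this section share one shape: a named lock ("refresh_cache-<uuid>", "compute_resources", an instance UUID) is acquired by a fully qualified caller, the time spent waiting is logged on acquire, and the hold time is logged on release. A stdlib-only imitation of that pattern is sketched below; the helper name and the example lock name in the comment are placeholders, not the oslo.concurrency implementation.

```python
import contextlib
import logging
import threading
import time
from collections import defaultdict

LOG = logging.getLogger(__name__)
_LOCKS = defaultdict(threading.Lock)   # one lock object per name, created on demand


@contextlib.contextmanager
def named_lock(name: str, caller: str):
    """Record how long we waited to acquire a named lock and how long we
    held it, mirroring the 'waited N.NNNs' / 'held N.NNNs' lines above."""
    lock = _LOCKS[name]
    t0 = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - t0
    LOG.debug('Lock "%s" acquired by "%s" :: waited %.3fs', name, caller, waited)
    t1 = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        held = time.monotonic() - t1
        LOG.debug('Lock "%s" "released" by "%s" :: held %.3fs', name, caller, held)


# Hypothetical usage mirroring the refresh_cache-<uuid> entries:
# with named_lock("refresh_cache-dd624e54-bd5b-4660-88a1-9d6f36560421", "build_nw_info"):
#     ...refresh the instance network info cache...
```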
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.914118] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 971.914118] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 22a11cf9-8f85-4371-98eb-25b267c9aff7/22a11cf9-8f85-4371-98eb-25b267c9aff7.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 971.914118] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5ced65b2-a449-48a7-aa7d-513714bc4a18 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.921475] env[63345]: DEBUG oslo_vmware.api [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Waiting for the task: (returnval){ [ 971.921475] env[63345]: value = "task-1017581" [ 971.921475] env[63345]: _type = "Task" [ 971.921475] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.930092] env[63345]: DEBUG oslo_vmware.api [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': task-1017581, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.028402] env[63345]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 972.028402] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]520a241c-b1ed-88d0-b99b-0540479c56bd" [ 972.028402] env[63345]: _type = "HttpNfcLease" [ 972.028402] env[63345]: } is ready. {{(pid=63345) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 972.028696] env[63345]: DEBUG oslo_vmware.rw_handles [None req-c8e84880-7ed8-4cdb-80fe-e1e030177dee tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 972.028696] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]520a241c-b1ed-88d0-b99b-0540479c56bd" [ 972.028696] env[63345]: _type = "HttpNfcLease" [ 972.028696] env[63345]: }. 
{{(pid=63345) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 972.029414] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9817f1a1-8d7e-40e7-a27c-519ed7c140ec {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.039013] env[63345]: DEBUG oslo_vmware.rw_handles [None req-c8e84880-7ed8-4cdb-80fe-e1e030177dee tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5264121a-3e6f-50e9-1d82-49be7794463d/disk-0.vmdk from lease info. {{(pid=63345) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 972.039013] env[63345]: DEBUG oslo_vmware.rw_handles [None req-c8e84880-7ed8-4cdb-80fe-e1e030177dee tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5264121a-3e6f-50e9-1d82-49be7794463d/disk-0.vmdk for reading. {{(pid=63345) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 972.176927] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquiring lock "732ac30c-15c1-4c57-bb70-ea3ed51f646b" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 972.177238] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lock "732ac30c-15c1-4c57-bb70-ea3ed51f646b" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 972.177429] env[63345]: INFO nova.compute.manager [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 732ac30c-15c1-4c57-bb70-ea3ed51f646b] Shelving [ 972.231559] env[63345]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-0b660ae1-e571-4b18-815a-4d1e8b140cdc {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.250044] env[63345]: DEBUG oslo_vmware.api [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017580, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.233155} completed successfully. 
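The export path logged above creates an HttpNfcLease for the cloned VM, waits for it to become ready, reads the VMDK URL out of the lease info, and opens that URL for reading so the stream-optimized image can be uploaded to Glance. The sketch below shows only the read side of that flow under a simplifying assumption: the lease info is modelled as a plain dict with a `device_urls` list of strings, which is not the real lease object shape.

```python
from urllib.request import urlopen

CHUNK = 64 * 1024


def read_exported_vmdk(lease_info: dict, sink) -> int:
    """Illustrative only: pick the first VMDK URL from a ready lease's info
    (assumed dict shape) and stream it into `sink`, the way the export above
    opens disk-0.vmdk for reading."""
    vmdk_urls = [u for u in lease_info.get("device_urls", []) if u.endswith(".vmdk")]
    if not vmdk_urls:
        raise ValueError("lease exposes no VMDK device URL")
    total = 0
    with urlopen(vmdk_urls[0]) as resp:        # the log "Opening URL ... for reading"
        while True:
            chunk = resp.read(CHUNK)
            if not chunk:
                break
            sink.write(chunk)
            total += len(chunk)                # progress can be reported back on the
    return total                               # lease, as the HttpNfcLeaseProgress call does
```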
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.252597] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 972.254069] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-787f1ba9-b53f-49a8-b5f7-159aa5ca4e99 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.288679] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Reconfiguring VM instance instance-00000056 to attach disk [datastore2] 4868a0a0-ca35-44b0-a90c-124aa366af76/4868a0a0-ca35-44b0-a90c-124aa366af76.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 972.294858] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-84784370-5d73-42e7-9cb8-329286a55258 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.317486] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquiring lock "3d644f16-7924-4545-a528-1499a702d614" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 972.317636] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Lock "3d644f16-7924-4545-a528-1499a702d614" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 972.324060] env[63345]: DEBUG oslo_vmware.api [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 972.324060] env[63345]: value = "task-1017582" [ 972.324060] env[63345]: _type = "Task" [ 972.324060] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.335452] env[63345]: DEBUG oslo_vmware.api [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017582, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.433161] env[63345]: DEBUG oslo_vmware.api [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': task-1017581, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.445651] env[63345]: DEBUG nova.network.neutron [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 972.642377] env[63345]: DEBUG nova.network.neutron [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Updating instance_info_cache with network_info: [{"id": "8a3e5f64-f812-4c1b-a9e0-b8b3146a1467", "address": "fa:16:3e:55:5a:7c", "network": {"id": "b360ab0d-3deb-4632-a8d5-c1639db9e9e2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2015660260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33c28bfca4da460e8ca96dc7519204c8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f35e69ef-c2c8-4b8c-9887-33e97b242c0a", "external-id": "nsx-vlan-transportzone-969", "segmentation_id": 969, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8a3e5f64-f8", "ovs_interfaceid": "8a3e5f64-f812-4c1b-a9e0-b8b3146a1467", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 972.818726] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7a025aa8-413d-4515-be61-b039c163c688 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.378s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 972.822576] env[63345]: DEBUG oslo_concurrency.lockutils [None req-bc8bacce-823e-4dbf-b8c4-cf24c91c47e6 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.362s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 972.822732] env[63345]: DEBUG nova.objects.instance [None req-bc8bacce-823e-4dbf-b8c4-cf24c91c47e6 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Lazy-loading 'resources' on Instance uuid 070a834d-6478-4705-8df0-2a27c8780507 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 972.827800] env[63345]: DEBUG nova.compute.manager [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 3d644f16-7924-4545-a528-1499a702d614] Starting instance... 
{{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 972.850847] env[63345]: DEBUG oslo_vmware.api [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017582, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.936379] env[63345]: DEBUG oslo_vmware.api [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': task-1017581, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.887097} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.936827] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 22a11cf9-8f85-4371-98eb-25b267c9aff7/22a11cf9-8f85-4371-98eb-25b267c9aff7.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 972.937141] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 972.937494] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c8b1621e-53a2-4959-ad43-9a3a138a0603 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.947586] env[63345]: DEBUG oslo_vmware.api [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Waiting for the task: (returnval){ [ 972.947586] env[63345]: value = "task-1017583" [ 972.947586] env[63345]: _type = "Task" [ 972.947586] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.957843] env[63345]: DEBUG oslo_vmware.api [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': task-1017583, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.145643] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Releasing lock "refresh_cache-dd624e54-bd5b-4660-88a1-9d6f36560421" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 973.145643] env[63345]: DEBUG nova.compute.manager [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Instance network_info: |[{"id": "8a3e5f64-f812-4c1b-a9e0-b8b3146a1467", "address": "fa:16:3e:55:5a:7c", "network": {"id": "b360ab0d-3deb-4632-a8d5-c1639db9e9e2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2015660260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33c28bfca4da460e8ca96dc7519204c8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f35e69ef-c2c8-4b8c-9887-33e97b242c0a", "external-id": "nsx-vlan-transportzone-969", "segmentation_id": 969, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8a3e5f64-f8", "ovs_interfaceid": "8a3e5f64-f812-4c1b-a9e0-b8b3146a1467", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 973.146119] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:55:5a:7c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f35e69ef-c2c8-4b8c-9887-33e97b242c0a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8a3e5f64-f812-4c1b-a9e0-b8b3146a1467', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 973.155345] env[63345]: DEBUG oslo.service.loopingcall [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
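The vmops entry above shows the translation from the Neutron network_info entry (port id, MAC, bridge, NSX logical-switch id) into the flat "Instance VIF info" dict used to build the VM. A hedged sketch of that mapping follows; the field names are taken from the structures printed in the log, while the function name and the fixed `vmxnet3` default are illustrative choices, not Nova's code.

```python
def vif_to_vmware_info(vif: dict) -> dict:
    """Map one network_info VIF entry (shape as printed in the log) to the
    flat dict the VMware spawn path consumes."""
    details = vif.get("details", {})
    return {
        "network_name": vif["network"]["bridge"],        # e.g. 'br-int'
        "mac_address": vif["address"],                   # e.g. 'fa:16:3e:55:5a:7c'
        "network_ref": {
            "type": "OpaqueNetwork",
            "network-id": details.get("nsx-logical-switch-id"),
            "network-type": "nsx.LogicalSwitch",
            "use-external-id": True,
        },
        "iface_id": vif["id"],                           # the Neutron port UUID
        "vif_model": "vmxnet3",                          # model seen in this log
    }
```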
{{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 973.155793] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 973.156160] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7727beac-c8fb-439c-9eea-2137dc249b4b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.178619] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 973.178619] env[63345]: value = "task-1017584" [ 973.178619] env[63345]: _type = "Task" [ 973.178619] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.188492] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017584, 'name': CreateVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.194870] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 732ac30c-15c1-4c57-bb70-ea3ed51f646b] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 973.195207] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-55569a66-e19c-407d-91a7-fa86fdf88cf5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.204133] env[63345]: DEBUG oslo_vmware.api [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for the task: (returnval){ [ 973.204133] env[63345]: value = "task-1017585" [ 973.204133] env[63345]: _type = "Task" [ 973.204133] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.216066] env[63345]: DEBUG oslo_vmware.api [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017585, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.361745] env[63345]: DEBUG oslo_vmware.api [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017582, 'name': ReconfigVM_Task, 'duration_secs': 0.661294} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.365748] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Reconfigured VM instance instance-00000056 to attach disk [datastore2] 4868a0a0-ca35-44b0-a90c-124aa366af76/4868a0a0-ca35-44b0-a90c-124aa366af76.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 973.367385] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'size': 0, 'encryption_format': None, 'encrypted': False, 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'disk_bus': None, 'guest_format': None, 'device_name': '/dev/sda', 'encryption_secret_uuid': None, 'image_id': '2ff49e1b-8f44-4332-bba9-777d55ff62c4'}], 'ephemerals': [], 'block_device_mapping': [{'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-226116', 'volume_id': 'f3eb7f29-d3fd-4c7d-ab93-1582eb175324', 'name': 'volume-f3eb7f29-d3fd-4c7d-ab93-1582eb175324', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '4868a0a0-ca35-44b0-a90c-124aa366af76', 'attached_at': '', 'detached_at': '', 'volume_id': 'f3eb7f29-d3fd-4c7d-ab93-1582eb175324', 'serial': 'f3eb7f29-d3fd-4c7d-ab93-1582eb175324'}, 'delete_on_termination': False, 'mount_device': '/dev/sdb', 'device_type': None, 'boot_index': None, 'disk_bus': None, 'guest_format': None, 'attachment_id': '1fc9cca8-87f3-4f4a-96cd-60ec4383aef1', 'volume_type': None}], 'swap': None} {{(pid=63345) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 973.367894] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Volume attach. 
Driver type: vmdk {{(pid=63345) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 973.368127] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-226116', 'volume_id': 'f3eb7f29-d3fd-4c7d-ab93-1582eb175324', 'name': 'volume-f3eb7f29-d3fd-4c7d-ab93-1582eb175324', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '4868a0a0-ca35-44b0-a90c-124aa366af76', 'attached_at': '', 'detached_at': '', 'volume_id': 'f3eb7f29-d3fd-4c7d-ab93-1582eb175324', 'serial': 'f3eb7f29-d3fd-4c7d-ab93-1582eb175324'} {{(pid=63345) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 973.373079] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbda8f48-a363-4dfc-8acd-b5c9dd2ed8c0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.377553] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 973.403065] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ddfd77b-a5e0-4abb-9e2f-d832da32e0e1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.438399] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Reconfiguring VM instance instance-00000056 to attach disk [datastore1] volume-f3eb7f29-d3fd-4c7d-ab93-1582eb175324/volume-f3eb7f29-d3fd-4c7d-ab93-1582eb175324.vmdk or device None with type thin {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 973.442307] env[63345]: INFO nova.scheduler.client.report [None req-7a025aa8-413d-4515-be61-b039c163c688 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Deleted allocation for migration 065e3d6c-c475-45d4-ae3e-8a6d8de03d6b [ 973.444061] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a1a7a16a-e94d-42c9-8a69-79f3bac06706 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.475045] env[63345]: DEBUG oslo_vmware.api [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': task-1017583, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074878} completed successfully. 
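The volumeops entries above print the vmdk connection_info in full before reconfiguring the VM to attach the volume's backing disk. The helper below validates that shape and pulls out the fields the reconfigure step needs; the dict layout is taken from the log, while the dataclass and function names are made up for illustration.

```python
from dataclasses import dataclass


@dataclass
class VmdkAttachment:
    backing_moref: str      # e.g. 'vm-226116', the shadow VM backing the volume
    volume_id: str
    disk_name: str          # 'volume-<uuid>'
    read_only: bool


def parse_vmdk_connection_info(connection_info: dict) -> VmdkAttachment:
    """Extract the fields needed to attach the volume's VMDK, using the
    connection_info layout shown in the log."""
    if connection_info.get("driver_volume_type") != "vmdk":
        raise ValueError("not a vmdk-backed volume")
    data = connection_info["data"]
    return VmdkAttachment(
        backing_moref=data["volume"],
        volume_id=data["volume_id"],
        disk_name=data["name"],
        read_only=data.get("access_mode") == "ro",
    )
```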
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.477339] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 973.479923] env[63345]: DEBUG oslo_vmware.api [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 973.479923] env[63345]: value = "task-1017586" [ 973.479923] env[63345]: _type = "Task" [ 973.479923] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.479923] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e94b5633-d3bf-4696-8a13-b8d995cb5f59 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.502031] env[63345]: DEBUG oslo_vmware.api [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017586, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.523813] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Reconfiguring VM instance instance-0000005e to attach disk [datastore2] 22a11cf9-8f85-4371-98eb-25b267c9aff7/22a11cf9-8f85-4371-98eb-25b267c9aff7.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 973.528117] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6fc0f338-4c50-473c-8eaa-0476cd35d98c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.552742] env[63345]: DEBUG oslo_vmware.api [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Waiting for the task: (returnval){ [ 973.552742] env[63345]: value = "task-1017587" [ 973.552742] env[63345]: _type = "Task" [ 973.552742] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.566526] env[63345]: DEBUG oslo_vmware.api [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': task-1017587, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.692664] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017584, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.694545] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92bd82e7-b113-4bb0-ad2b-8ed80d4954be {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.704768] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f6e113d-c4fc-40b8-8a66-e74dd7ceae74 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.718192] env[63345]: DEBUG oslo_vmware.api [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017585, 'name': PowerOffVM_Task, 'duration_secs': 0.222766} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.749437] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 732ac30c-15c1-4c57-bb70-ea3ed51f646b] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 973.750056] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bbf78be-e95f-4afc-ab94-fc6b7c2d2d3b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.753195] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c6d25fe-f66c-49ed-add7-71ac6d087aa1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.776867] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1a95266-98d0-4b2d-9e30-ba0b3b355641 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.783479] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62aa4642-ed41-4d07-bf8e-79155cf6a2d4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.786579] env[63345]: DEBUG nova.compute.manager [req-a46f4135-425d-48e4-b5fb-1160bec28698 req-bbc270bf-3d51-4c3d-80e5-d2a9e76117da service nova] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Received event network-changed-8a3e5f64-f812-4c1b-a9e0-b8b3146a1467 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 973.786672] env[63345]: DEBUG nova.compute.manager [req-a46f4135-425d-48e4-b5fb-1160bec28698 req-bbc270bf-3d51-4c3d-80e5-d2a9e76117da service nova] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Refreshing instance network info cache due to event network-changed-8a3e5f64-f812-4c1b-a9e0-b8b3146a1467. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 973.787752] env[63345]: DEBUG oslo_concurrency.lockutils [req-a46f4135-425d-48e4-b5fb-1160bec28698 req-bbc270bf-3d51-4c3d-80e5-d2a9e76117da service nova] Acquiring lock "refresh_cache-dd624e54-bd5b-4660-88a1-9d6f36560421" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 973.787752] env[63345]: DEBUG oslo_concurrency.lockutils [req-a46f4135-425d-48e4-b5fb-1160bec28698 req-bbc270bf-3d51-4c3d-80e5-d2a9e76117da service nova] Acquired lock "refresh_cache-dd624e54-bd5b-4660-88a1-9d6f36560421" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 973.787752] env[63345]: DEBUG nova.network.neutron [req-a46f4135-425d-48e4-b5fb-1160bec28698 req-bbc270bf-3d51-4c3d-80e5-d2a9e76117da service nova] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Refreshing network info cache for port 8a3e5f64-f812-4c1b-a9e0-b8b3146a1467 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 973.799454] env[63345]: DEBUG nova.compute.provider_tree [None req-bc8bacce-823e-4dbf-b8c4-cf24c91c47e6 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 973.969140] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7a025aa8-413d-4515-be61-b039c163c688 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Lock "0fe61754-458c-4c5c-bb2d-2677302e5fb9" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 9.862s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 973.996479] env[63345]: DEBUG oslo_vmware.api [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017586, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.063399] env[63345]: DEBUG oslo_vmware.api [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': task-1017587, 'name': ReconfigVM_Task, 'duration_secs': 0.407778} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.063593] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Reconfigured VM instance instance-0000005e to attach disk [datastore2] 22a11cf9-8f85-4371-98eb-25b267c9aff7/22a11cf9-8f85-4371-98eb-25b267c9aff7.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 974.064364] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7183f861-727a-451a-b431-f4c0ee44b487 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.073872] env[63345]: DEBUG oslo_vmware.api [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Waiting for the task: (returnval){ [ 974.073872] env[63345]: value = "task-1017588" [ 974.073872] env[63345]: _type = "Task" [ 974.073872] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.087993] env[63345]: DEBUG oslo_vmware.api [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': task-1017588, 'name': Rename_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.197500] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017584, 'name': CreateVM_Task, 'duration_secs': 0.534207} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.197500] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 974.197982] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 974.198333] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 974.198838] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 974.199237] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-63b2f90c-761f-4b41-aff3-3bfc3526d1d3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.206867] env[63345]: DEBUG oslo_vmware.api [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Waiting for the task: (returnval){ [ 974.206867] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]5207a095-55d5-25e0-790b-e8a71a83b38c" [ 974.206867] env[63345]: _type = "Task" [ 974.206867] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.219829] env[63345]: DEBUG oslo_vmware.api [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5207a095-55d5-25e0-790b-e8a71a83b38c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.312775] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 732ac30c-15c1-4c57-bb70-ea3ed51f646b] Creating Snapshot of the VM instance {{(pid=63345) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 974.314889] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-4d8aa765-330d-4f8e-8aef-60598de685ed {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.323168] env[63345]: DEBUG oslo_vmware.api [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for the task: (returnval){ [ 974.323168] env[63345]: value = "task-1017589" [ 974.323168] env[63345]: _type = "Task" [ 974.323168] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.332960] env[63345]: DEBUG oslo_vmware.api [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017589, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.339689] env[63345]: DEBUG oslo_concurrency.lockutils [None req-839ef603-27ea-4395-b87d-9ec34570ecec tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquiring lock "0fe61754-458c-4c5c-bb2d-2677302e5fb9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 974.339990] env[63345]: DEBUG oslo_concurrency.lockutils [None req-839ef603-27ea-4395-b87d-9ec34570ecec tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Lock "0fe61754-458c-4c5c-bb2d-2677302e5fb9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 974.340482] env[63345]: DEBUG oslo_concurrency.lockutils [None req-839ef603-27ea-4395-b87d-9ec34570ecec tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquiring lock "0fe61754-458c-4c5c-bb2d-2677302e5fb9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 974.340482] env[63345]: DEBUG oslo_concurrency.lockutils [None req-839ef603-27ea-4395-b87d-9ec34570ecec tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Lock "0fe61754-458c-4c5c-bb2d-2677302e5fb9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 974.340635] env[63345]: DEBUG oslo_concurrency.lockutils [None req-839ef603-27ea-4395-b87d-9ec34570ecec 
tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Lock "0fe61754-458c-4c5c-bb2d-2677302e5fb9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 974.343393] env[63345]: INFO nova.compute.manager [None req-839ef603-27ea-4395-b87d-9ec34570ecec tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Terminating instance [ 974.349235] env[63345]: ERROR nova.scheduler.client.report [None req-bc8bacce-823e-4dbf-b8c4-cf24c91c47e6 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [req-be114bd2-0417-44e6-bdff-ecf61b7d06df] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID fc35ddde-c15e-4ab8-bf77-a06ae0805b57. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-be114bd2-0417-44e6-bdff-ecf61b7d06df"}]} [ 974.381200] env[63345]: DEBUG nova.scheduler.client.report [None req-bc8bacce-823e-4dbf-b8c4-cf24c91c47e6 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Refreshing inventories for resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 974.398046] env[63345]: DEBUG nova.scheduler.client.report [None req-bc8bacce-823e-4dbf-b8c4-cf24c91c47e6 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Updating ProviderTree inventory for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 974.398396] env[63345]: DEBUG nova.compute.provider_tree [None req-bc8bacce-823e-4dbf-b8c4-cf24c91c47e6 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 974.414538] env[63345]: DEBUG 
nova.scheduler.client.report [None req-bc8bacce-823e-4dbf-b8c4-cf24c91c47e6 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Refreshing aggregate associations for resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57, aggregates: None {{(pid=63345) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 974.438456] env[63345]: DEBUG nova.scheduler.client.report [None req-bc8bacce-823e-4dbf-b8c4-cf24c91c47e6 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Refreshing trait associations for resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=63345) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 974.494590] env[63345]: DEBUG oslo_vmware.api [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017586, 'name': ReconfigVM_Task, 'duration_secs': 0.574799} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.495213] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Reconfigured VM instance instance-00000056 to attach disk [datastore1] volume-f3eb7f29-d3fd-4c7d-ab93-1582eb175324/volume-f3eb7f29-d3fd-4c7d-ab93-1582eb175324.vmdk or device None with type thin {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 974.506038] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-33052d3c-3c45-40fa-8a5a-b1a2dfd34553 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.524821] env[63345]: DEBUG oslo_vmware.api [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 974.524821] env[63345]: value = "task-1017590" [ 974.524821] env[63345]: _type = "Task" [ 974.524821] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.540269] env[63345]: DEBUG oslo_vmware.api [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017590, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.591714] env[63345]: DEBUG oslo_vmware.api [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': task-1017588, 'name': Rename_Task, 'duration_secs': 0.189204} completed successfully. 
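The 409 "placement.concurrent_update / resource provider generation conflict" error followed by "Refreshing inventories/aggregates/traits" reflects Placement's optimistic-concurrency scheme: every inventory PUT carries the provider generation, and a stale generation is rejected so the client must re-read and retry. A rough sketch of that refresh-and-retry loop with `requests` is below; the endpoint URL, token, and microversion header are assumptions for illustration, and this is not Nova's report client.

```python
import requests

PLACEMENT = "http://placement.example.test"            # assumed endpoint
HEADERS = {"X-Auth-Token": "ADMIN_TOKEN",               # assumed auth token
           "OpenStack-API-Version": "placement 1.26"}   # assumed microversion


def set_inventories(provider_uuid, inventories, max_retries=3):
    """PUT inventories, re-reading the generation after each 409 conflict."""
    url = "%s/resource_providers/%s/inventories" % (PLACEMENT, provider_uuid)
    for _ in range(max_retries):
        # Re-read the current provider generation before every attempt.
        gen = requests.get(url, headers=HEADERS).json()[
            "resource_provider_generation"]
        resp = requests.put(url, headers=HEADERS, json={
            "resource_provider_generation": gen,
            "inventories": inventories,
        })
        if resp.status_code != 409:
            resp.raise_for_status()
            return resp.json()
        # Another writer bumped the generation; loop, refresh, retry.
    raise RuntimeError("still conflicting after %d retries" % max_retries)
```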
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.591966] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 974.592279] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fede72e1-da45-4084-8cf9-a66e58ddcc0f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.602337] env[63345]: DEBUG oslo_vmware.api [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Waiting for the task: (returnval){ [ 974.602337] env[63345]: value = "task-1017591" [ 974.602337] env[63345]: _type = "Task" [ 974.602337] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.612862] env[63345]: DEBUG oslo_vmware.api [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': task-1017591, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.702517] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a587284-dfe2-42d3-a4b7-c5fbb8da5c5a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.713765] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48c0d911-6089-4b7c-84e7-07beca468132 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.723589] env[63345]: DEBUG oslo_vmware.api [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5207a095-55d5-25e0-790b-e8a71a83b38c, 'name': SearchDatastore_Task, 'duration_secs': 0.011565} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.724581] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 974.725111] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 974.726635] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 974.726635] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 974.726635] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 974.726800] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7fafe5a8-228a-412b-921b-7135dbb8a891 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.764205] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d34ae642-29b3-44f7-907a-26e5633eeb53 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.773057] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 974.773411] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 974.777912] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0ee71a19-60b5-47cd-9039-d7c67d4611cb {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.782345] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8669aa7-ff23-4796-b1b0-63928bb82c43 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.791804] env[63345]: DEBUG oslo_vmware.api [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Waiting for the task: (returnval){ [ 974.791804] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52fb1187-85c8-fa3c-4430-d3f9d397abac" [ 974.791804] env[63345]: _type = "Task" [ 974.791804] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.809828] env[63345]: DEBUG nova.compute.provider_tree [None req-bc8bacce-823e-4dbf-b8c4-cf24c91c47e6 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 974.826178] env[63345]: DEBUG oslo_vmware.api [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52fb1187-85c8-fa3c-4430-d3f9d397abac, 'name': SearchDatastore_Task, 'duration_secs': 0.012244} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.831621] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c4386557-3228-4dfb-834c-0c41fd5a934b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.842649] env[63345]: DEBUG oslo_vmware.api [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Waiting for the task: (returnval){ [ 974.842649] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]521ec945-5d5f-781a-842f-2c2e20e2c0a3" [ 974.842649] env[63345]: _type = "Task" [ 974.842649] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.843691] env[63345]: DEBUG oslo_vmware.api [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017589, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.856249] env[63345]: DEBUG nova.compute.manager [None req-839ef603-27ea-4395-b87d-9ec34570ecec tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Start destroying the instance on the hypervisor. {{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 974.856249] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-839ef603-27ea-4395-b87d-9ec34570ecec tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 974.856537] env[63345]: DEBUG oslo_vmware.api [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]521ec945-5d5f-781a-842f-2c2e20e2c0a3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.857496] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2a772f9-7e25-47fb-9869-6e387e9ede96 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.867055] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-839ef603-27ea-4395-b87d-9ec34570ecec tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 974.869931] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a9b9301f-a0b9-4aa9-acd9-3a4c542bb628 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.879617] env[63345]: DEBUG oslo_vmware.api [None req-839ef603-27ea-4395-b87d-9ec34570ecec tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Waiting for the task: (returnval){ [ 974.879617] env[63345]: value = "task-1017592" [ 974.879617] env[63345]: _type = "Task" [ 974.879617] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.889407] env[63345]: DEBUG oslo_vmware.api [None req-839ef603-27ea-4395-b87d-9ec34570ecec tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017592, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.953897] env[63345]: DEBUG oslo_concurrency.lockutils [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquiring lock "a8321259-b3a6-4e87-b13a-b964cf0dd766" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 974.954193] env[63345]: DEBUG oslo_concurrency.lockutils [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Lock "a8321259-b3a6-4e87-b13a-b964cf0dd766" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 974.985259] env[63345]: DEBUG nova.network.neutron [req-a46f4135-425d-48e4-b5fb-1160bec28698 req-bbc270bf-3d51-4c3d-80e5-d2a9e76117da service nova] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Updated VIF entry in instance network info cache for port 8a3e5f64-f812-4c1b-a9e0-b8b3146a1467. {{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 974.985644] env[63345]: DEBUG nova.network.neutron [req-a46f4135-425d-48e4-b5fb-1160bec28698 req-bbc270bf-3d51-4c3d-80e5-d2a9e76117da service nova] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Updating instance_info_cache with network_info: [{"id": "8a3e5f64-f812-4c1b-a9e0-b8b3146a1467", "address": "fa:16:3e:55:5a:7c", "network": {"id": "b360ab0d-3deb-4632-a8d5-c1639db9e9e2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2015660260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33c28bfca4da460e8ca96dc7519204c8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f35e69ef-c2c8-4b8c-9887-33e97b242c0a", "external-id": "nsx-vlan-transportzone-969", "segmentation_id": 969, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8a3e5f64-f8", "ovs_interfaceid": "8a3e5f64-f812-4c1b-a9e0-b8b3146a1467", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 975.038189] env[63345]: DEBUG oslo_vmware.api [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017590, 'name': ReconfigVM_Task, 'duration_secs': 0.258636} completed successfully. 
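The instance_info_cache update above carries the full Neutron VIF model for port 8a3e5f64 (port id, MAC, fixed IPs, OVS details). The short sketch below pulls the commonly needed fields out of one of those entries; the dict literal is trimmed from the cache entry logged above, so nothing here is invented beyond the helper itself.

```python
# Trimmed from the instance_info_cache entry logged above for
# port 8a3e5f64-f812-4c1b-a9e0-b8b3146a1467.
vif = {
    "id": "8a3e5f64-f812-4c1b-a9e0-b8b3146a1467",
    "address": "fa:16:3e:55:5a:7c",
    "devname": "tap8a3e5f64-f8",
    "type": "ovs",
    "network": {
        "id": "b360ab0d-3deb-4632-a8d5-c1639db9e9e2",
        "bridge": "br-int",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "gateway": {"address": "192.168.128.1"},
            "ips": [{"address": "192.168.128.3", "type": "fixed"}],
        }],
    },
}


def fixed_ips(vif):
    """Yield (address, cidr) pairs for every fixed IP on the VIF."""
    for subnet in vif["network"]["subnets"]:
        for ip in subnet["ips"]:
            if ip.get("type") == "fixed":
                yield ip["address"], subnet["cidr"]


print(vif["devname"], vif["address"], list(fixed_ips(vif)))
# tap8a3e5f64-f8 fa:16:3e:55:5a:7c [('192.168.128.3', '192.168.128.0/28')]
```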
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.039117] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-226116', 'volume_id': 'f3eb7f29-d3fd-4c7d-ab93-1582eb175324', 'name': 'volume-f3eb7f29-d3fd-4c7d-ab93-1582eb175324', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '4868a0a0-ca35-44b0-a90c-124aa366af76', 'attached_at': '', 'detached_at': '', 'volume_id': 'f3eb7f29-d3fd-4c7d-ab93-1582eb175324', 'serial': 'f3eb7f29-d3fd-4c7d-ab93-1582eb175324'} {{(pid=63345) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 975.039851] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fd3223d0-7b18-4780-9354-54ec117d3574 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.050298] env[63345]: DEBUG oslo_vmware.api [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 975.050298] env[63345]: value = "task-1017593" [ 975.050298] env[63345]: _type = "Task" [ 975.050298] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.062719] env[63345]: DEBUG oslo_vmware.api [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017593, 'name': Rename_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.114468] env[63345]: DEBUG oslo_vmware.api [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': task-1017591, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.339262] env[63345]: DEBUG oslo_vmware.api [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017589, 'name': CreateSnapshot_Task, 'duration_secs': 0.526128} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.339605] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 732ac30c-15c1-4c57-bb70-ea3ed51f646b] Created Snapshot of the VM instance {{(pid=63345) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 975.340855] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06cfe3b0-84ce-4427-b6b2-fe61a392f098 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.345425] env[63345]: ERROR nova.scheduler.client.report [None req-bc8bacce-823e-4dbf-b8c4-cf24c91c47e6 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] [req-2a32d45d-20df-48c9-a6d0-c833ad938e65] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID fc35ddde-c15e-4ab8-bf77-a06ae0805b57. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-2a32d45d-20df-48c9-a6d0-c833ad938e65"}]} [ 975.366613] env[63345]: DEBUG oslo_vmware.api [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]521ec945-5d5f-781a-842f-2c2e20e2c0a3, 'name': SearchDatastore_Task, 'duration_secs': 0.026363} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.367868] env[63345]: DEBUG nova.scheduler.client.report [None req-bc8bacce-823e-4dbf-b8c4-cf24c91c47e6 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Refreshing inventories for resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 975.369920] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 975.370210] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] dd624e54-bd5b-4660-88a1-9d6f36560421/dd624e54-bd5b-4660-88a1-9d6f36560421.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 975.370662] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f52f4cee-a0ae-424f-820e-2a83d200fac0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.379632] env[63345]: DEBUG oslo_vmware.api [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Waiting for the task: (returnval){ [ 975.379632] env[63345]: value = "task-1017594" [ 975.379632] env[63345]: _type = "Task" [ 975.379632] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.394586] env[63345]: DEBUG oslo_vmware.api [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017594, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.399550] env[63345]: DEBUG oslo_vmware.api [None req-839ef603-27ea-4395-b87d-9ec34570ecec tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017592, 'name': PowerOffVM_Task, 'duration_secs': 0.246388} completed successfully. 
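The CopyVirtualDisk_Task above copies the cached base image VMDK into the instance directory, and the ExtendVirtualDisk_Task that follows grows the copied root disk to the flavor size (the log later shows 1048576, i.e. a 1 GB root disk expressed in KB). The compact sketch below captures that copy-then-resize sequence; `copy_virtual_disk`, `extend_virtual_disk`, and the path layout are hypothetical blocking stand-ins for the vCenter tasks, not Nova's vm_util calls.

```python
def provision_root_disk(datastore, image_id, instance_uuid, root_gb,
                        copy_virtual_disk, extend_virtual_disk):
    """Copy the cached base VMDK for an instance, then grow it to size.

    copy_virtual_disk/extend_virtual_disk are hypothetical blocking
    callables standing in for CopyVirtualDisk_Task and
    ExtendVirtualDisk_Task as seen in the log.
    """
    cache_path = ("[%s] devstack-image-cache_base/%s/%s.vmdk"
                  % (datastore, image_id, image_id))
    instance_path = ("[%s] %s/%s.vmdk"
                     % (datastore, instance_uuid, instance_uuid))

    # Copy the cached base image next to the instance.
    copy_virtual_disk(cache_path, instance_path)

    # Grow the copy to the flavor's root disk size, in KB
    # (1 GB -> 1048576, matching the value in the log).
    extend_virtual_disk(instance_path, root_gb * 1024 * 1024)
    return instance_path
```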
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.400055] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-839ef603-27ea-4395-b87d-9ec34570ecec tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 975.400198] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-839ef603-27ea-4395-b87d-9ec34570ecec tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 975.400517] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0530a875-1096-4cf3-bf88-0329af480dda {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.403383] env[63345]: DEBUG nova.scheduler.client.report [None req-bc8bacce-823e-4dbf-b8c4-cf24c91c47e6 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Updating ProviderTree inventory for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 975.403667] env[63345]: DEBUG nova.compute.provider_tree [None req-bc8bacce-823e-4dbf-b8c4-cf24c91c47e6 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 975.420553] env[63345]: DEBUG nova.scheduler.client.report [None req-bc8bacce-823e-4dbf-b8c4-cf24c91c47e6 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Refreshing aggregate associations for resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57, aggregates: None {{(pid=63345) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 975.447838] env[63345]: DEBUG nova.scheduler.client.report [None req-bc8bacce-823e-4dbf-b8c4-cf24c91c47e6 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Refreshing trait associations for resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=63345) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 975.456769] 
env[63345]: DEBUG nova.compute.manager [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: a8321259-b3a6-4e87-b13a-b964cf0dd766] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 975.488662] env[63345]: DEBUG oslo_concurrency.lockutils [req-a46f4135-425d-48e4-b5fb-1160bec28698 req-bbc270bf-3d51-4c3d-80e5-d2a9e76117da service nova] Releasing lock "refresh_cache-dd624e54-bd5b-4660-88a1-9d6f36560421" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 975.493752] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-839ef603-27ea-4395-b87d-9ec34570ecec tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 975.494083] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-839ef603-27ea-4395-b87d-9ec34570ecec tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 975.494308] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-839ef603-27ea-4395-b87d-9ec34570ecec tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Deleting the datastore file [datastore2] 0fe61754-458c-4c5c-bb2d-2677302e5fb9 {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 975.494697] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-56932f74-a413-4d3a-afaf-dd894793bf21 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.503795] env[63345]: DEBUG oslo_vmware.api [None req-839ef603-27ea-4395-b87d-9ec34570ecec tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Waiting for the task: (returnval){ [ 975.503795] env[63345]: value = "task-1017596" [ 975.503795] env[63345]: _type = "Task" [ 975.503795] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.521228] env[63345]: DEBUG oslo_vmware.api [None req-839ef603-27ea-4395-b87d-9ec34570ecec tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017596, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.566749] env[63345]: DEBUG oslo_vmware.api [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017593, 'name': Rename_Task, 'duration_secs': 0.432584} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.567121] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 975.567418] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fb844886-8c13-408d-952a-e3d1f20480cc {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.580185] env[63345]: DEBUG oslo_vmware.api [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 975.580185] env[63345]: value = "task-1017597" [ 975.580185] env[63345]: _type = "Task" [ 975.580185] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.593290] env[63345]: DEBUG oslo_vmware.api [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017597, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.618390] env[63345]: DEBUG oslo_vmware.api [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': task-1017591, 'name': PowerOnVM_Task, 'duration_secs': 0.739531} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.622300] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 975.622300] env[63345]: INFO nova.compute.manager [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Took 10.22 seconds to spawn the instance on the hypervisor. 
[ 975.622300] env[63345]: DEBUG nova.compute.manager [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 975.622300] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13bc3cce-f2a3-4fbd-be51-0717cf3a63ec {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.717603] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa78bc01-2675-4746-bc2c-692105c87dc6 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.727054] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a63c9cee-1083-40a4-a431-edc450168734 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.769589] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d332fdc5-be8d-437f-80f1-3ab44eebc1cd {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.779159] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c79d218-a887-4b99-961b-a2a1595afe1e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.798040] env[63345]: DEBUG nova.compute.provider_tree [None req-bc8bacce-823e-4dbf-b8c4-cf24c91c47e6 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 975.876165] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 732ac30c-15c1-4c57-bb70-ea3ed51f646b] Creating linked-clone VM from snapshot {{(pid=63345) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 975.876609] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-74ba22ec-73a4-4093-86c6-f0762f9fb79a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.892484] env[63345]: DEBUG oslo_vmware.api [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017594, 'name': CopyVirtualDisk_Task} progress is 51%. 
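The earlier CreateSnapshot_Task and the "Creating linked-clone VM from snapshot" / CloneVM_Task lines above show how a copy of a running VM is taken: snapshot first, then clone from that snapshot with the clone's disks left linked so the copy stays cheap. A schematic sketch follows; `create_snapshot`, `clone_from_snapshot`, and `remove_snapshot` are hypothetical blocking helpers, not the Nova vmops methods.

```python
import contextlib


def capture_vm_clone(vm_ref, clone_name, create_snapshot,
                     clone_from_snapshot, remove_snapshot):
    """Snapshot a live VM, linked-clone it, then drop the snapshot.

    All three callables are hypothetical stand-ins for the vCenter tasks
    seen in the log (CreateSnapshot_Task, CloneVM_Task, ...).
    """
    snapshot_ref = create_snapshot(vm_ref)  # CreateSnapshot_Task
    try:
        # A linked clone keeps its disks as deltas on top of the snapshot
        # (vSphere diskMoveType "createNewChildDiskBacking"), which is what
        # makes the clone fast compared with a full copy.
        return clone_from_snapshot(vm_ref, snapshot_ref, clone_name)
    finally:
        # Best-effort cleanup: the clone no longer needs the source snapshot.
        with contextlib.suppress(Exception):
            remove_snapshot(vm_ref, snapshot_ref)
```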
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.894515] env[63345]: DEBUG oslo_vmware.api [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for the task: (returnval){ [ 975.894515] env[63345]: value = "task-1017598" [ 975.894515] env[63345]: _type = "Task" [ 975.894515] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.905426] env[63345]: DEBUG oslo_vmware.api [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017598, 'name': CloneVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.988494] env[63345]: DEBUG oslo_concurrency.lockutils [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 976.019197] env[63345]: DEBUG oslo_vmware.api [None req-839ef603-27ea-4395-b87d-9ec34570ecec tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017596, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.317288} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.019643] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-839ef603-27ea-4395-b87d-9ec34570ecec tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 976.019944] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-839ef603-27ea-4395-b87d-9ec34570ecec tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 976.020423] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-839ef603-27ea-4395-b87d-9ec34570ecec tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 976.020585] env[63345]: INFO nova.compute.manager [None req-839ef603-27ea-4395-b87d-9ec34570ecec tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Took 1.16 seconds to destroy the instance on the hypervisor. [ 976.020993] env[63345]: DEBUG oslo.service.loopingcall [None req-839ef603-27ea-4395-b87d-9ec34570ecec tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 976.021404] env[63345]: DEBUG nova.compute.manager [-] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 976.021570] env[63345]: DEBUG nova.network.neutron [-] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 976.094361] env[63345]: DEBUG oslo_vmware.api [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017597, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.153330] env[63345]: INFO nova.compute.manager [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Took 16.56 seconds to build instance. [ 976.347370] env[63345]: DEBUG nova.scheduler.client.report [None req-bc8bacce-823e-4dbf-b8c4-cf24c91c47e6 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Updated inventory for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with generation 128 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 976.349346] env[63345]: DEBUG nova.compute.provider_tree [None req-bc8bacce-823e-4dbf-b8c4-cf24c91c47e6 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Updating resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 generation from 128 to 129 during operation: update_inventory {{(pid=63345) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 976.349682] env[63345]: DEBUG nova.compute.provider_tree [None req-bc8bacce-823e-4dbf-b8c4-cf24c91c47e6 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 976.394963] env[63345]: DEBUG oslo_vmware.api [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017594, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.656823} completed successfully. 
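The terminate path for instance 0fe61754 above runs power off, then unregister, then deletion of the instance's datastore directory, and finally hands network deallocation to a looping call that retries until it succeeds. The condensed sketch below shows that ordering with a bounded retry around deallocation; every helper is a hypothetical stand-in for the hypervisor and Neutron steps, not Nova's destroy code.

```python
import time


def destroy_instance(instance_uuid, power_off, unregister_vm,
                     delete_datastore_dir, deallocate_network,
                     retries=3, delay=2.0):
    """Tear a VM down in the order the log shows, retrying deallocation.

    power_off/unregister_vm/delete_datastore_dir/deallocate_network are
    hypothetical callables for PowerOffVM_Task, UnregisterVM,
    DeleteDatastoreFile_Task, and the Neutron cleanup respectively.
    """
    power_off(instance_uuid)             # PowerOffVM_Task
    unregister_vm(instance_uuid)         # UnregisterVM
    delete_datastore_dir(instance_uuid)  # DeleteDatastoreFile_Task

    # The log delegates this step to a looping call; a simple bounded
    # retry captures the same idea.
    for attempt in range(1, retries + 1):
        try:
            deallocate_network(instance_uuid)
            return
        except Exception:
            if attempt == retries:
                raise
            time.sleep(delay)
```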
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.395592] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] dd624e54-bd5b-4660-88a1-9d6f36560421/dd624e54-bd5b-4660-88a1-9d6f36560421.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 976.395771] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 976.401368] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-872c7ec8-d8a1-4af3-b157-a8e3ff8f3a11 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.411718] env[63345]: DEBUG oslo_vmware.api [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017598, 'name': CloneVM_Task} progress is 94%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.413545] env[63345]: DEBUG oslo_vmware.api [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Waiting for the task: (returnval){ [ 976.413545] env[63345]: value = "task-1017599" [ 976.413545] env[63345]: _type = "Task" [ 976.413545] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.430085] env[63345]: DEBUG oslo_vmware.api [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017599, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.593999] env[63345]: DEBUG oslo_vmware.api [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017597, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.655322] env[63345]: DEBUG nova.compute.manager [req-db061011-0fd7-46c1-9ef2-768cfff0cd80 req-883c3f12-728a-457a-8cc2-27b88cd102a9 service nova] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Received event network-vif-deleted-0cc6f455-5ad2-4802-a0ff-42268fe50023 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 976.655322] env[63345]: INFO nova.compute.manager [req-db061011-0fd7-46c1-9ef2-768cfff0cd80 req-883c3f12-728a-457a-8cc2-27b88cd102a9 service nova] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Neutron deleted interface 0cc6f455-5ad2-4802-a0ff-42268fe50023; detaching it from the instance and deleting it from the info cache [ 976.655322] env[63345]: DEBUG nova.network.neutron [req-db061011-0fd7-46c1-9ef2-768cfff0cd80 req-883c3f12-728a-457a-8cc2-27b88cd102a9 service nova] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 976.661315] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8e2eea0b-900a-4dca-8b91-1589763811d5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Lock "22a11cf9-8f85-4371-98eb-25b267c9aff7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.075s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 976.855897] env[63345]: DEBUG oslo_concurrency.lockutils [None req-bc8bacce-823e-4dbf-b8c4-cf24c91c47e6 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 4.034s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 976.861849] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.482s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 976.866139] env[63345]: INFO nova.compute.claims [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 3d644f16-7924-4545-a528-1499a702d614] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 976.887119] env[63345]: INFO nova.scheduler.client.report [None req-bc8bacce-823e-4dbf-b8c4-cf24c91c47e6 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Deleted allocations for instance 070a834d-6478-4705-8df0-2a27c8780507 [ 976.906850] env[63345]: DEBUG oslo_vmware.api [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017598, 'name': CloneVM_Task} progress is 94%. 
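The compute_resources lock lines and "Claim successful on node ..." above come from the resource tracker: a single host-wide lock serializes claims so that free VCPU/RAM/disk are checked and reserved atomically before a build proceeds. The toy sketch below shows that claim-under-lock pattern; the class and its fields are illustrative only, not Nova's ResourceTracker, and the capacity numbers are taken from the inventory figures in the log.

```python
import threading


class TinyResourceTracker:
    """Toy tracker: check-and-reserve resources under one lock."""

    def __init__(self, vcpus, memory_mb, disk_gb):
        self._lock = threading.Lock()  # plays the "compute_resources" role
        self.free = {"vcpus": vcpus, "memory_mb": memory_mb, "disk_gb": disk_gb}

    def instance_claim(self, flavor):
        """Reserve a flavor's worth of resources or raise."""
        with self._lock:
            if any(self.free[k] < flavor[k] for k in self.free):
                raise RuntimeError("insufficient resources for claim")
            for k in self.free:
                self.free[k] -= flavor[k]
        # Reaching this point corresponds to "Claim successful on node ...".
        return dict(flavor)


# Capacity values mirror the provider inventory reported in the log.
tracker = TinyResourceTracker(vcpus=48, memory_mb=196590, disk_gb=400)
tracker.instance_claim({"vcpus": 1, "memory_mb": 512, "disk_gb": 1})
```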
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.926587] env[63345]: DEBUG oslo_vmware.api [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017599, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.088494} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.927283] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 976.930022] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e4f5e3d-68d6-4670-bdfa-ef21bb3d4e3a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.957845] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Reconfiguring VM instance instance-0000005f to attach disk [datastore2] dd624e54-bd5b-4660-88a1-9d6f36560421/dd624e54-bd5b-4660-88a1-9d6f36560421.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 976.959035] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bc7492ce-082c-4d9f-91c8-ff5e2c4d61ba {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.981014] env[63345]: DEBUG oslo_vmware.api [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Waiting for the task: (returnval){ [ 976.981014] env[63345]: value = "task-1017600" [ 976.981014] env[63345]: _type = "Task" [ 976.981014] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.993746] env[63345]: DEBUG oslo_vmware.api [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017600, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.999906] env[63345]: DEBUG nova.network.neutron [-] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 977.095324] env[63345]: DEBUG oslo_vmware.api [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017597, 'name': PowerOnVM_Task, 'duration_secs': 1.157118} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.095324] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 977.095324] env[63345]: DEBUG nova.compute.manager [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 977.095324] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4a61843-c491-4cfc-8c02-ae4ccaf5c155 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.156326] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cc206db5-3660-4524-8a16-2093d50308cb {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.168338] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4457edac-22af-40dc-9e36-7e354981b2a4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.205720] env[63345]: DEBUG nova.compute.manager [req-db061011-0fd7-46c1-9ef2-768cfff0cd80 req-883c3f12-728a-457a-8cc2-27b88cd102a9 service nova] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Detach interface failed, port_id=0cc6f455-5ad2-4802-a0ff-42268fe50023, reason: Instance 0fe61754-458c-4c5c-bb2d-2677302e5fb9 could not be found. {{(pid=63345) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 977.404297] env[63345]: DEBUG oslo_concurrency.lockutils [None req-bc8bacce-823e-4dbf-b8c4-cf24c91c47e6 tempest-ServersNegativeTestJSON-797332457 tempest-ServersNegativeTestJSON-797332457-project-member] Lock "070a834d-6478-4705-8df0-2a27c8780507" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.134s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 977.411988] env[63345]: DEBUG oslo_vmware.api [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017598, 'name': CloneVM_Task} progress is 95%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.497625] env[63345]: DEBUG oslo_vmware.api [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017600, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.503483] env[63345]: INFO nova.compute.manager [-] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Took 1.48 seconds to deallocate network for instance. 
[ 977.616298] env[63345]: DEBUG oslo_concurrency.lockutils [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 977.627042] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Acquiring lock "3d1e47c5-7e8c-417c-8c7c-009db666d391" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 977.627174] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Lock "3d1e47c5-7e8c-417c-8c7c-009db666d391" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 977.912567] env[63345]: DEBUG oslo_vmware.api [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017598, 'name': CloneVM_Task, 'duration_secs': 1.786933} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.913032] env[63345]: INFO nova.virt.vmwareapi.vmops [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 732ac30c-15c1-4c57-bb70-ea3ed51f646b] Created linked-clone VM from snapshot [ 977.913770] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47efe02f-aeff-4552-ab0a-1c2dad998457 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.923979] env[63345]: DEBUG nova.virt.vmwareapi.images [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 732ac30c-15c1-4c57-bb70-ea3ed51f646b] Uploading image ed99e277-f7a0-4bd8-af6b-7b9bada72cca {{(pid=63345) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 977.944046] env[63345]: DEBUG oslo_vmware.rw_handles [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 977.944046] env[63345]: value = "vm-226129" [ 977.944046] env[63345]: _type = "VirtualMachine" [ 977.944046] env[63345]: }. 
{{(pid=63345) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 977.944437] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-afeeae86-9736-4180-a12b-b2d95e2f4696 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.955412] env[63345]: DEBUG oslo_vmware.rw_handles [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lease: (returnval){ [ 977.955412] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52c99714-1a57-d2ea-c871-2a45d8fad234" [ 977.955412] env[63345]: _type = "HttpNfcLease" [ 977.955412] env[63345]: } obtained for exporting VM: (result){ [ 977.955412] env[63345]: value = "vm-226129" [ 977.955412] env[63345]: _type = "VirtualMachine" [ 977.955412] env[63345]: }. {{(pid=63345) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 977.955706] env[63345]: DEBUG oslo_vmware.api [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for the lease: (returnval){ [ 977.955706] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52c99714-1a57-d2ea-c871-2a45d8fad234" [ 977.955706] env[63345]: _type = "HttpNfcLease" [ 977.955706] env[63345]: } to be ready. {{(pid=63345) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 977.974268] env[63345]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 977.974268] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52c99714-1a57-d2ea-c871-2a45d8fad234" [ 977.974268] env[63345]: _type = "HttpNfcLease" [ 977.974268] env[63345]: } is ready. {{(pid=63345) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 977.974578] env[63345]: DEBUG oslo_vmware.rw_handles [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 977.974578] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52c99714-1a57-d2ea-c871-2a45d8fad234" [ 977.974578] env[63345]: _type = "HttpNfcLease" [ 977.974578] env[63345]: }. {{(pid=63345) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 977.975369] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a210e43-da48-428e-a9fe-1b5117e04dfb {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.983722] env[63345]: DEBUG oslo_vmware.rw_handles [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5228ac55-e3cb-06cc-56b2-25afbf4d9fe6/disk-0.vmdk from lease info. 
{{(pid=63345) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 977.983926] env[63345]: DEBUG oslo_vmware.rw_handles [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5228ac55-e3cb-06cc-56b2-25afbf4d9fe6/disk-0.vmdk for reading. {{(pid=63345) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 978.048297] env[63345]: DEBUG oslo_concurrency.lockutils [None req-839ef603-27ea-4395-b87d-9ec34570ecec tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 978.054979] env[63345]: DEBUG oslo_vmware.api [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017600, 'name': ReconfigVM_Task, 'duration_secs': 0.586831} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.057042] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Reconfigured VM instance instance-0000005f to attach disk [datastore2] dd624e54-bd5b-4660-88a1-9d6f36560421/dd624e54-bd5b-4660-88a1-9d6f36560421.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 978.057936] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1924ed96-3627-4b72-92b7-c0eea0798fb2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.067702] env[63345]: DEBUG oslo_vmware.api [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Waiting for the task: (returnval){ [ 978.067702] env[63345]: value = "task-1017602" [ 978.067702] env[63345]: _type = "Task" [ 978.067702] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.080510] env[63345]: DEBUG oslo_vmware.api [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017602, 'name': Rename_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.088507] env[63345]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-330afa8f-4589-4273-aac1-4c1f92a84c0a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.132422] env[63345]: DEBUG nova.compute.manager [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] [instance: 3d1e47c5-7e8c-417c-8c7c-009db666d391] Starting instance... 
{{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 978.210058] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3af0681-0bb3-4e6d-9daa-b2489ed43f98 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.218620] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c8042e9-e169-4f2b-99d6-b71f58a600d2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.254741] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b36c0551-4be0-4148-b9d3-8b19284fbfba {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.263883] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1804b0bc-1ff4-47f2-8002-c461e3b91577 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.280178] env[63345]: DEBUG nova.compute.provider_tree [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 978.578531] env[63345]: DEBUG oslo_vmware.api [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017602, 'name': Rename_Task, 'duration_secs': 0.227533} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.578793] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 978.579058] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6f31c13a-302c-4888-be52-7bb1619de2d1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.587243] env[63345]: DEBUG oslo_vmware.api [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Waiting for the task: (returnval){ [ 978.587243] env[63345]: value = "task-1017603" [ 978.587243] env[63345]: _type = "Task" [ 978.587243] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.596403] env[63345]: DEBUG oslo_vmware.api [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017603, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.660031] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 978.692939] env[63345]: DEBUG nova.compute.manager [req-e31a6b65-8edd-4cdd-bb65-400bd017e6c6 req-0d5eff34-87df-40aa-8ab3-b9dfc3a0b2e9 service nova] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Received event network-changed-267ad158-547a-4d3a-a838-3d964626d731 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 978.693336] env[63345]: DEBUG nova.compute.manager [req-e31a6b65-8edd-4cdd-bb65-400bd017e6c6 req-0d5eff34-87df-40aa-8ab3-b9dfc3a0b2e9 service nova] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Refreshing instance network info cache due to event network-changed-267ad158-547a-4d3a-a838-3d964626d731. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 978.694259] env[63345]: DEBUG oslo_concurrency.lockutils [req-e31a6b65-8edd-4cdd-bb65-400bd017e6c6 req-0d5eff34-87df-40aa-8ab3-b9dfc3a0b2e9 service nova] Acquiring lock "refresh_cache-22a11cf9-8f85-4371-98eb-25b267c9aff7" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 978.694801] env[63345]: DEBUG oslo_concurrency.lockutils [req-e31a6b65-8edd-4cdd-bb65-400bd017e6c6 req-0d5eff34-87df-40aa-8ab3-b9dfc3a0b2e9 service nova] Acquired lock "refresh_cache-22a11cf9-8f85-4371-98eb-25b267c9aff7" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 978.698030] env[63345]: DEBUG nova.network.neutron [req-e31a6b65-8edd-4cdd-bb65-400bd017e6c6 req-0d5eff34-87df-40aa-8ab3-b9dfc3a0b2e9 service nova] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Refreshing network info cache for port 267ad158-547a-4d3a-a838-3d964626d731 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 978.783980] env[63345]: DEBUG nova.scheduler.client.report [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 979.042738] env[63345]: DEBUG oslo_concurrency.lockutils [None req-91546b16-6f77-446a-8ad0-61982e27d96e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquiring lock "4868a0a0-ca35-44b0-a90c-124aa366af76" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 979.042738] env[63345]: DEBUG oslo_concurrency.lockutils [None 
req-91546b16-6f77-446a-8ad0-61982e27d96e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lock "4868a0a0-ca35-44b0-a90c-124aa366af76" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 979.099356] env[63345]: DEBUG oslo_vmware.api [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017603, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.289379] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.430s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 979.290208] env[63345]: DEBUG nova.compute.manager [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 3d644f16-7924-4545-a528-1499a702d614] Start building networks asynchronously for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 979.294255] env[63345]: DEBUG oslo_concurrency.lockutils [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.306s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 979.296964] env[63345]: INFO nova.compute.claims [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: a8321259-b3a6-4e87-b13a-b964cf0dd766] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 979.546247] env[63345]: INFO nova.compute.manager [None req-91546b16-6f77-446a-8ad0-61982e27d96e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Detaching volume f3eb7f29-d3fd-4c7d-ab93-1582eb175324 [ 979.586903] env[63345]: INFO nova.virt.block_device [None req-91546b16-6f77-446a-8ad0-61982e27d96e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Attempting to driver detach volume f3eb7f29-d3fd-4c7d-ab93-1582eb175324 from mountpoint /dev/sdb [ 979.587271] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-91546b16-6f77-446a-8ad0-61982e27d96e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Volume detach. 
Driver type: vmdk {{(pid=63345) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 979.587497] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-91546b16-6f77-446a-8ad0-61982e27d96e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-226116', 'volume_id': 'f3eb7f29-d3fd-4c7d-ab93-1582eb175324', 'name': 'volume-f3eb7f29-d3fd-4c7d-ab93-1582eb175324', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '4868a0a0-ca35-44b0-a90c-124aa366af76', 'attached_at': '', 'detached_at': '', 'volume_id': 'f3eb7f29-d3fd-4c7d-ab93-1582eb175324', 'serial': 'f3eb7f29-d3fd-4c7d-ab93-1582eb175324'} {{(pid=63345) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 979.588436] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e93d1e0f-678e-4945-83ca-aeb074527f67 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.602652] env[63345]: DEBUG oslo_vmware.api [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017603, 'name': PowerOnVM_Task, 'duration_secs': 0.731091} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.620025] env[63345]: DEBUG nova.network.neutron [req-e31a6b65-8edd-4cdd-bb65-400bd017e6c6 req-0d5eff34-87df-40aa-8ab3-b9dfc3a0b2e9 service nova] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Updated VIF entry in instance network info cache for port 267ad158-547a-4d3a-a838-3d964626d731. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 979.620475] env[63345]: DEBUG nova.network.neutron [req-e31a6b65-8edd-4cdd-bb65-400bd017e6c6 req-0d5eff34-87df-40aa-8ab3-b9dfc3a0b2e9 service nova] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Updating instance_info_cache with network_info: [{"id": "267ad158-547a-4d3a-a838-3d964626d731", "address": "fa:16:3e:9e:ba:8d", "network": {"id": "13df4553-212e-4adb-8de0-da1acdf99671", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-238696814-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.153", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4560e378b6aa47a3bbb5a2f7c5b76f5f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "90328c7b-15c4-4742-805b-755248d67029", "external-id": "nsx-vlan-transportzone-860", "segmentation_id": 860, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap267ad158-54", "ovs_interfaceid": "267ad158-547a-4d3a-a838-3d964626d731", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 979.625024] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 979.625024] env[63345]: INFO nova.compute.manager [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Took 9.03 seconds to spawn the instance on the hypervisor. 
[ 979.625024] env[63345]: DEBUG nova.compute.manager [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 979.625024] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-836b8a75-3fd9-4bfc-b82d-d52510275e62 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.629390] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcaf3095-40b0-4922-86ba-aa8ec6ced216 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.643405] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79bd12c2-9c56-43b8-8a6b-1597a75d70a9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.672334] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d779b23f-882a-40e7-8233-18cf80fccf93 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.696274] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-91546b16-6f77-446a-8ad0-61982e27d96e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] The volume has not been displaced from its original location: [datastore1] volume-f3eb7f29-d3fd-4c7d-ab93-1582eb175324/volume-f3eb7f29-d3fd-4c7d-ab93-1582eb175324.vmdk. No consolidation needed. {{(pid=63345) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 979.705861] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-91546b16-6f77-446a-8ad0-61982e27d96e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Reconfiguring VM instance instance-00000056 to detach disk 2001 {{(pid=63345) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 979.707009] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-04a165ef-4ee5-4203-b514-0fed6d8b4760 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.728158] env[63345]: DEBUG oslo_vmware.api [None req-91546b16-6f77-446a-8ad0-61982e27d96e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 979.728158] env[63345]: value = "task-1017604" [ 979.728158] env[63345]: _type = "Task" [ 979.728158] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.738145] env[63345]: DEBUG oslo_vmware.api [None req-91546b16-6f77-446a-8ad0-61982e27d96e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017604, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.804184] env[63345]: DEBUG nova.compute.utils [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 979.807629] env[63345]: DEBUG nova.compute.manager [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 3d644f16-7924-4545-a528-1499a702d614] Allocating IP information in the background. {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 979.807812] env[63345]: DEBUG nova.network.neutron [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 3d644f16-7924-4545-a528-1499a702d614] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 979.854176] env[63345]: DEBUG nova.policy [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fce058d27d8e4da19af436b282b37f32', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '63d7b3facae6416989f763e610cf98f7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 980.124559] env[63345]: DEBUG oslo_concurrency.lockutils [req-e31a6b65-8edd-4cdd-bb65-400bd017e6c6 req-0d5eff34-87df-40aa-8ab3-b9dfc3a0b2e9 service nova] Releasing lock "refresh_cache-22a11cf9-8f85-4371-98eb-25b267c9aff7" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 980.161129] env[63345]: INFO nova.compute.manager [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Took 18.19 seconds to build instance. [ 980.240686] env[63345]: DEBUG oslo_vmware.api [None req-91546b16-6f77-446a-8ad0-61982e27d96e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017604, 'name': ReconfigVM_Task, 'duration_secs': 0.311088} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.241018] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-91546b16-6f77-446a-8ad0-61982e27d96e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Reconfigured VM instance instance-00000056 to detach disk 2001 {{(pid=63345) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 980.246392] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b3b70cef-4f6d-4d1e-9f08-02ddd06c5398 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.257691] env[63345]: DEBUG nova.network.neutron [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 3d644f16-7924-4545-a528-1499a702d614] Successfully created port: 15685195-e615-4386-8884-fbd1fd0e8221 {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 980.269248] env[63345]: DEBUG oslo_vmware.api [None req-91546b16-6f77-446a-8ad0-61982e27d96e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 980.269248] env[63345]: value = "task-1017605" [ 980.269248] env[63345]: _type = "Task" [ 980.269248] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.280972] env[63345]: DEBUG oslo_vmware.api [None req-91546b16-6f77-446a-8ad0-61982e27d96e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017605, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.308990] env[63345]: DEBUG nova.compute.manager [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 3d644f16-7924-4545-a528-1499a702d614] Start building block device mappings for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 980.537262] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea9365e4-c54a-428f-a4ae-349c2b734f98 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.545885] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfdbc3a2-004f-462f-8ac9-480b96d393ab {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.578601] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e072e0c2-8756-457d-82f2-d3a8d28f322d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.587904] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c92cca4-51ee-47a6-9fdb-ea4470d1c936 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.604015] env[63345]: DEBUG nova.compute.provider_tree [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 980.662686] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9b704d89-369f-4e05-802d-2da1b7aaee89 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Lock "dd624e54-bd5b-4660-88a1-9d6f36560421" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.705s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 980.781012] env[63345]: DEBUG oslo_vmware.api [None req-91546b16-6f77-446a-8ad0-61982e27d96e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017605, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.107388] env[63345]: DEBUG nova.scheduler.client.report [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 981.283234] env[63345]: DEBUG oslo_vmware.api [None req-91546b16-6f77-446a-8ad0-61982e27d96e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017605, 'name': ReconfigVM_Task, 'duration_secs': 0.898653} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.283594] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-91546b16-6f77-446a-8ad0-61982e27d96e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-226116', 'volume_id': 'f3eb7f29-d3fd-4c7d-ab93-1582eb175324', 'name': 'volume-f3eb7f29-d3fd-4c7d-ab93-1582eb175324', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '4868a0a0-ca35-44b0-a90c-124aa366af76', 'attached_at': '', 'detached_at': '', 'volume_id': 'f3eb7f29-d3fd-4c7d-ab93-1582eb175324', 'serial': 'f3eb7f29-d3fd-4c7d-ab93-1582eb175324'} {{(pid=63345) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 981.321390] env[63345]: DEBUG nova.compute.manager [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 3d644f16-7924-4545-a528-1499a702d614] Start spawning the instance on the hypervisor. {{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 981.613619] env[63345]: DEBUG oslo_concurrency.lockutils [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.319s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 981.614408] env[63345]: DEBUG nova.compute.manager [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: a8321259-b3a6-4e87-b13a-b964cf0dd766] Start building networks asynchronously for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 981.618186] env[63345]: DEBUG oslo_concurrency.lockutils [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 4.002s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 981.618515] env[63345]: DEBUG nova.objects.instance [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63345) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 981.835982] env[63345]: DEBUG nova.objects.instance [None req-91546b16-6f77-446a-8ad0-61982e27d96e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lazy-loading 'flavor' on Instance uuid 4868a0a0-ca35-44b0-a90c-124aa366af76 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 982.123604] env[63345]: DEBUG nova.compute.utils [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 982.128234] env[63345]: DEBUG nova.compute.manager [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: a8321259-b3a6-4e87-b13a-b964cf0dd766] Allocating IP information in the background. 
{{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 982.128451] env[63345]: DEBUG nova.network.neutron [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: a8321259-b3a6-4e87-b13a-b964cf0dd766] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 982.137706] env[63345]: DEBUG nova.virt.hardware [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 982.138253] env[63345]: DEBUG nova.virt.hardware [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 982.138253] env[63345]: DEBUG nova.virt.hardware [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 982.138397] env[63345]: DEBUG nova.virt.hardware [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 982.138520] env[63345]: DEBUG nova.virt.hardware [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 982.138672] env[63345]: DEBUG nova.virt.hardware [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 982.139374] env[63345]: DEBUG nova.virt.hardware [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 982.139707] env[63345]: DEBUG nova.virt.hardware [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 
tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 982.140051] env[63345]: DEBUG nova.virt.hardware [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 982.140331] env[63345]: DEBUG nova.virt.hardware [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 982.140547] env[63345]: DEBUG nova.virt.hardware [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 982.142777] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2d0ba7a-e4f2-4f3c-b6b7-e15ed06c546a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.155826] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06a74307-69c5-434f-be1d-491babb4755e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.162815] env[63345]: DEBUG oslo_vmware.rw_handles [None req-c8e84880-7ed8-4cdb-80fe-e1e030177dee tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5264121a-3e6f-50e9-1d82-49be7794463d/disk-0.vmdk. {{(pid=63345) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 982.163753] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6edc8231-0b26-496d-a89b-75ed01ca6250 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.189078] env[63345]: DEBUG oslo_vmware.rw_handles [None req-c8e84880-7ed8-4cdb-80fe-e1e030177dee tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5264121a-3e6f-50e9-1d82-49be7794463d/disk-0.vmdk is in state: ready. {{(pid=63345) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 982.189393] env[63345]: ERROR oslo_vmware.rw_handles [None req-c8e84880-7ed8-4cdb-80fe-e1e030177dee tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5264121a-3e6f-50e9-1d82-49be7794463d/disk-0.vmdk due to incomplete transfer. 
[ 982.189547] env[63345]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-e4dd2c13-7054-4b28-b94c-9cc8e467d9d5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.196988] env[63345]: DEBUG nova.policy [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '27fc4f99c7f44b1ea421bd8f13de6e43', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '964cee117b3c4601b3afe82a8bb9c23e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 982.201597] env[63345]: DEBUG oslo_vmware.rw_handles [None req-c8e84880-7ed8-4cdb-80fe-e1e030177dee tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5264121a-3e6f-50e9-1d82-49be7794463d/disk-0.vmdk. {{(pid=63345) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 982.201765] env[63345]: DEBUG nova.virt.vmwareapi.images [None req-c8e84880-7ed8-4cdb-80fe-e1e030177dee tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] Uploaded image 6b5e6190-95b5-4c2f-bc9f-f057f14d2ec4 to the Glance image server {{(pid=63345) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:473}} [ 982.204473] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8e84880-7ed8-4cdb-80fe-e1e030177dee tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] Destroying the VM {{(pid=63345) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 982.205067] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-43c2524a-05d6-4bfb-8d98-de9185f9bcf9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.217031] env[63345]: DEBUG oslo_vmware.api [None req-c8e84880-7ed8-4cdb-80fe-e1e030177dee tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Waiting for the task: (returnval){ [ 982.217031] env[63345]: value = "task-1017606" [ 982.217031] env[63345]: _type = "Task" [ 982.217031] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.226560] env[63345]: DEBUG oslo_vmware.api [None req-c8e84880-7ed8-4cdb-80fe-e1e030177dee tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Task: {'id': task-1017606, 'name': Destroy_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.507105] env[63345]: DEBUG nova.compute.manager [req-aca90c7f-0913-4fed-9d0e-0655e7aa82ed req-1940024d-9cd3-42da-9f26-3975f94f2324 service nova] [instance: 3d644f16-7924-4545-a528-1499a702d614] Received event network-vif-plugged-15685195-e615-4386-8884-fbd1fd0e8221 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 982.507105] env[63345]: DEBUG oslo_concurrency.lockutils [req-aca90c7f-0913-4fed-9d0e-0655e7aa82ed req-1940024d-9cd3-42da-9f26-3975f94f2324 service nova] Acquiring lock "3d644f16-7924-4545-a528-1499a702d614-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 982.507105] env[63345]: DEBUG oslo_concurrency.lockutils [req-aca90c7f-0913-4fed-9d0e-0655e7aa82ed req-1940024d-9cd3-42da-9f26-3975f94f2324 service nova] Lock "3d644f16-7924-4545-a528-1499a702d614-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 982.507105] env[63345]: DEBUG oslo_concurrency.lockutils [req-aca90c7f-0913-4fed-9d0e-0655e7aa82ed req-1940024d-9cd3-42da-9f26-3975f94f2324 service nova] Lock "3d644f16-7924-4545-a528-1499a702d614-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 982.507105] env[63345]: DEBUG nova.compute.manager [req-aca90c7f-0913-4fed-9d0e-0655e7aa82ed req-1940024d-9cd3-42da-9f26-3975f94f2324 service nova] [instance: 3d644f16-7924-4545-a528-1499a702d614] No waiting events found dispatching network-vif-plugged-15685195-e615-4386-8884-fbd1fd0e8221 {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 982.507105] env[63345]: WARNING nova.compute.manager [req-aca90c7f-0913-4fed-9d0e-0655e7aa82ed req-1940024d-9cd3-42da-9f26-3975f94f2324 service nova] [instance: 3d644f16-7924-4545-a528-1499a702d614] Received unexpected event network-vif-plugged-15685195-e615-4386-8884-fbd1fd0e8221 for instance with vm_state building and task_state spawning. [ 982.509755] env[63345]: DEBUG nova.network.neutron [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: a8321259-b3a6-4e87-b13a-b964cf0dd766] Successfully created port: 8aa59061-70d8-466e-83b6-d91bcc0101d2 {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 982.622268] env[63345]: DEBUG nova.network.neutron [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 3d644f16-7924-4545-a528-1499a702d614] Successfully updated port: 15685195-e615-4386-8884-fbd1fd0e8221 {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 982.629725] env[63345]: DEBUG nova.compute.manager [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: a8321259-b3a6-4e87-b13a-b964cf0dd766] Start building block device mappings for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 982.633686] env[63345]: DEBUG oslo_concurrency.lockutils [None req-638652b0-9dbb-436b-813d-a376974d6ba5 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.015s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 982.637642] env[63345]: DEBUG oslo_concurrency.lockutils [None req-839ef603-27ea-4395-b87d-9ec34570ecec tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.588s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 982.637642] env[63345]: DEBUG oslo_concurrency.lockutils [None req-839ef603-27ea-4395-b87d-9ec34570ecec tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 982.639971] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.980s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 982.642038] env[63345]: INFO nova.compute.claims [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] [instance: 3d1e47c5-7e8c-417c-8c7c-009db666d391] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 982.670551] env[63345]: INFO nova.scheduler.client.report [None req-839ef603-27ea-4395-b87d-9ec34570ecec tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Deleted allocations for instance 0fe61754-458c-4c5c-bb2d-2677302e5fb9 [ 982.687833] env[63345]: DEBUG nova.compute.manager [None req-958f9d2f-620a-4f2b-a6c2-282bda8a46ca tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 982.689356] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3731683-3938-4abd-8fe9-7025d6c7d07b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.732676] env[63345]: DEBUG oslo_vmware.api [None req-c8e84880-7ed8-4cdb-80fe-e1e030177dee tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Task: {'id': task-1017606, 'name': Destroy_Task} progress is 33%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.845906] env[63345]: DEBUG oslo_concurrency.lockutils [None req-91546b16-6f77-446a-8ad0-61982e27d96e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lock "4868a0a0-ca35-44b0-a90c-124aa366af76" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.804s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 983.099150] env[63345]: DEBUG oslo_concurrency.lockutils [None req-573572fd-d608-4c7c-a5a7-50f44cb4cd6a tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquiring lock "4868a0a0-ca35-44b0-a90c-124aa366af76" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 983.099497] env[63345]: DEBUG oslo_concurrency.lockutils [None req-573572fd-d608-4c7c-a5a7-50f44cb4cd6a tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lock "4868a0a0-ca35-44b0-a90c-124aa366af76" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 983.099771] env[63345]: DEBUG oslo_concurrency.lockutils [None req-573572fd-d608-4c7c-a5a7-50f44cb4cd6a tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquiring lock "4868a0a0-ca35-44b0-a90c-124aa366af76-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 983.100027] env[63345]: DEBUG oslo_concurrency.lockutils [None req-573572fd-d608-4c7c-a5a7-50f44cb4cd6a tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lock "4868a0a0-ca35-44b0-a90c-124aa366af76-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 983.100248] env[63345]: DEBUG oslo_concurrency.lockutils [None req-573572fd-d608-4c7c-a5a7-50f44cb4cd6a tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lock "4868a0a0-ca35-44b0-a90c-124aa366af76-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 983.102560] env[63345]: INFO nova.compute.manager [None req-573572fd-d608-4c7c-a5a7-50f44cb4cd6a tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Terminating instance [ 983.123966] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquiring lock "refresh_cache-3d644f16-7924-4545-a528-1499a702d614" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 983.124125] env[63345]: DEBUG 
oslo_concurrency.lockutils [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquired lock "refresh_cache-3d644f16-7924-4545-a528-1499a702d614" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 983.124549] env[63345]: DEBUG nova.network.neutron [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 3d644f16-7924-4545-a528-1499a702d614] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 983.181750] env[63345]: DEBUG oslo_concurrency.lockutils [None req-839ef603-27ea-4395-b87d-9ec34570ecec tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Lock "0fe61754-458c-4c5c-bb2d-2677302e5fb9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.842s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 983.210071] env[63345]: INFO nova.compute.manager [None req-958f9d2f-620a-4f2b-a6c2-282bda8a46ca tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] instance snapshotting [ 983.210817] env[63345]: DEBUG nova.objects.instance [None req-958f9d2f-620a-4f2b-a6c2-282bda8a46ca tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lazy-loading 'flavor' on Instance uuid 726332dd-8699-49a4-a9ea-b9cbfc159855 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 983.226958] env[63345]: DEBUG oslo_vmware.api [None req-c8e84880-7ed8-4cdb-80fe-e1e030177dee tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Task: {'id': task-1017606, 'name': Destroy_Task, 'duration_secs': 0.623615} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.227265] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-c8e84880-7ed8-4cdb-80fe-e1e030177dee tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] Destroyed the VM [ 983.227511] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-c8e84880-7ed8-4cdb-80fe-e1e030177dee tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] Deleting Snapshot of the VM instance {{(pid=63345) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 983.228439] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-ef51c21c-2523-4f07-8acc-9b07fce64513 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.236634] env[63345]: DEBUG oslo_vmware.api [None req-c8e84880-7ed8-4cdb-80fe-e1e030177dee tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Waiting for the task: (returnval){ [ 983.236634] env[63345]: value = "task-1017607" [ 983.236634] env[63345]: _type = "Task" [ 983.236634] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.246048] env[63345]: DEBUG oslo_vmware.api [None req-c8e84880-7ed8-4cdb-80fe-e1e030177dee tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Task: {'id': task-1017607, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.606463] env[63345]: DEBUG nova.compute.manager [None req-573572fd-d608-4c7c-a5a7-50f44cb4cd6a tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Start destroying the instance on the hypervisor. {{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 983.606730] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-573572fd-d608-4c7c-a5a7-50f44cb4cd6a tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 983.608196] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-594ba61b-8a53-4578-99a7-b19874ed2968 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.617401] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-573572fd-d608-4c7c-a5a7-50f44cb4cd6a tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 983.617523] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b4e8ac3d-201d-4bdf-bd73-096d2c5e9acd {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.633847] env[63345]: DEBUG oslo_vmware.api [None req-573572fd-d608-4c7c-a5a7-50f44cb4cd6a tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 983.633847] env[63345]: value = "task-1017608" [ 983.633847] env[63345]: _type = "Task" [ 983.633847] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.642559] env[63345]: DEBUG oslo_vmware.api [None req-573572fd-d608-4c7c-a5a7-50f44cb4cd6a tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017608, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.644795] env[63345]: DEBUG nova.compute.manager [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: a8321259-b3a6-4e87-b13a-b964cf0dd766] Start spawning the instance on the hypervisor. 
{{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 983.684044] env[63345]: DEBUG nova.virt.hardware [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 983.684323] env[63345]: DEBUG nova.virt.hardware [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 983.684535] env[63345]: DEBUG nova.virt.hardware [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 983.684768] env[63345]: DEBUG nova.virt.hardware [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 983.684925] env[63345]: DEBUG nova.virt.hardware [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 983.685125] env[63345]: DEBUG nova.virt.hardware [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 983.685391] env[63345]: DEBUG nova.virt.hardware [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 983.685615] env[63345]: DEBUG nova.virt.hardware [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 983.685804] 
env[63345]: DEBUG nova.virt.hardware [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 983.685993] env[63345]: DEBUG nova.virt.hardware [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 983.686240] env[63345]: DEBUG nova.virt.hardware [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 983.687818] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dd94f23-bb50-4fea-ad4e-dcac547d16af {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.692486] env[63345]: DEBUG nova.network.neutron [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 3d644f16-7924-4545-a528-1499a702d614] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 983.702755] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cddb2a6c-8518-4539-9f1f-b57f001bb59f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.736702] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddaa2da5-efe4-4e6c-bddc-965f2f4e4e7c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.767459] env[63345]: DEBUG oslo_vmware.api [None req-c8e84880-7ed8-4cdb-80fe-e1e030177dee tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Task: {'id': task-1017607, 'name': RemoveSnapshot_Task} progress is 80%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.773440] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b196bb00-7868-4013-95ce-6f31e2f5cfc3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.951917] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquiring lock "7057cdfc-a6d9-4e52-b650-6a5709d5f8c2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 983.953200] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Lock "7057cdfc-a6d9-4e52-b650-6a5709d5f8c2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 983.970047] env[63345]: DEBUG nova.network.neutron [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 3d644f16-7924-4545-a528-1499a702d614] Updating instance_info_cache with network_info: [{"id": "15685195-e615-4386-8884-fbd1fd0e8221", "address": "fa:16:3e:3c:c7:f0", "network": {"id": "f05df594-fc76-4e2d-b29b-6942fee8dc99", "bridge": "br-int", "label": "tempest-ServersTestJSON-241206779-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63d7b3facae6416989f763e610cf98f7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15685195-e6", "ovs_interfaceid": "15685195-e615-4386-8884-fbd1fd0e8221", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 983.974838] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b20b7df2-48c4-466a-820c-3a94cf4fbe94 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.984902] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca7fa5e0-c660-458c-b082-b1d782dba1d5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.019489] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-5770ec5e-8c9e-4222-bae8-0771975b8f08 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.028900] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70f2604a-0f8e-42a7-8232-959acbbc0c9f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.045123] env[63345]: DEBUG nova.compute.provider_tree [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 984.047855] env[63345]: DEBUG nova.compute.manager [req-39bf6db9-81a0-407b-b0f4-0108a7b5f384 req-8801e147-5de0-449b-afca-e11a9336b918 service nova] [instance: a8321259-b3a6-4e87-b13a-b964cf0dd766] Received event network-vif-plugged-8aa59061-70d8-466e-83b6-d91bcc0101d2 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 984.048121] env[63345]: DEBUG oslo_concurrency.lockutils [req-39bf6db9-81a0-407b-b0f4-0108a7b5f384 req-8801e147-5de0-449b-afca-e11a9336b918 service nova] Acquiring lock "a8321259-b3a6-4e87-b13a-b964cf0dd766-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 984.048303] env[63345]: DEBUG oslo_concurrency.lockutils [req-39bf6db9-81a0-407b-b0f4-0108a7b5f384 req-8801e147-5de0-449b-afca-e11a9336b918 service nova] Lock "a8321259-b3a6-4e87-b13a-b964cf0dd766-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 984.048474] env[63345]: DEBUG oslo_concurrency.lockutils [req-39bf6db9-81a0-407b-b0f4-0108a7b5f384 req-8801e147-5de0-449b-afca-e11a9336b918 service nova] Lock "a8321259-b3a6-4e87-b13a-b964cf0dd766-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 984.048646] env[63345]: DEBUG nova.compute.manager [req-39bf6db9-81a0-407b-b0f4-0108a7b5f384 req-8801e147-5de0-449b-afca-e11a9336b918 service nova] [instance: a8321259-b3a6-4e87-b13a-b964cf0dd766] No waiting events found dispatching network-vif-plugged-8aa59061-70d8-466e-83b6-d91bcc0101d2 {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 984.048813] env[63345]: WARNING nova.compute.manager [req-39bf6db9-81a0-407b-b0f4-0108a7b5f384 req-8801e147-5de0-449b-afca-e11a9336b918 service nova] [instance: a8321259-b3a6-4e87-b13a-b964cf0dd766] Received unexpected event network-vif-plugged-8aa59061-70d8-466e-83b6-d91bcc0101d2 for instance with vm_state building and task_state spawning. 
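[editor's note] The DEBUG/WARNING pair just above (pop_instance_event followed by "Received unexpected event network-vif-plugged-… for instance with vm_state building and task_state spawning") shows Nova's external-event plumbing at work: Neutron reports that the VIF was plugged, and the compute manager either hands the event to a waiter that registered for it or logs it as unexpected because nothing was waiting yet. The sketch below is a simplified, stdlib-only illustration of that registry pattern under assumed names (InstanceEvents, prepare_for_event, external_instance_event are hypothetical and are not Nova's actual classes or signatures); it is not the project's implementation, only a model of the behaviour the log records.

# Minimal, stdlib-only sketch of the "waiting events" pattern seen in the log:
# an external event is either delivered to a registered waiter or reported
# as unexpected. All names here are illustrative, not Nova's real code.
import threading
import logging

logging.basicConfig(level=logging.DEBUG, format="%(levelname)s %(message)s")
LOG = logging.getLogger("sketch")


class InstanceEvents:
    """Map (instance_uuid, event_name) -> threading.Event for waiters."""

    def __init__(self):
        self._events = {}          # {instance_uuid: {event_name: Event}}
        self._lock = threading.Lock()

    def prepare_for_event(self, instance_uuid, event_name):
        """Called by code that will later wait for the event."""
        with self._lock:
            ev = threading.Event()
            self._events.setdefault(instance_uuid, {})[event_name] = ev
            return ev

    def pop_instance_event(self, instance_uuid, event_name):
        """Called when an external event arrives; returns the waiter or None."""
        with self._lock:   # analogous to the "<uuid>-events" lock in the log
            return self._events.get(instance_uuid, {}).pop(event_name, None)


events = InstanceEvents()


def external_instance_event(instance_uuid, event_name):
    """Dispatch an incoming event, mirroring the DEBUG/WARNING pair above."""
    waiter = events.pop_instance_event(instance_uuid, event_name)
    if waiter is None:
        LOG.warning("Received unexpected event %s for instance %s",
                    event_name, instance_uuid)
    else:
        waiter.set()   # wake whoever called prepare_for_event()


if __name__ == "__main__":
    uuid = "a8321259-b3a6-4e87-b13a-b964cf0dd766"
    # Event arrives before anyone registered a waiter -> warning, as in the log.
    external_instance_event(uuid, "network-vif-plugged-8aa59061")
    # Register a waiter first, then deliver -> the waiter is released.
    ev = events.prepare_for_event(uuid, "network-vif-plugged-8aa59061")
    external_instance_event(uuid, "network-vif-plugged-8aa59061")
    print("delivered:", ev.is_set())

In this trace the warning appears while the instance is still in vm_state building / task_state spawning, which in terms of the model above simply means the plug notification arrived before any waiter had been registered; the spawn continues and the port update is picked up a few entries later.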
[ 984.134463] env[63345]: DEBUG nova.network.neutron [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: a8321259-b3a6-4e87-b13a-b964cf0dd766] Successfully updated port: 8aa59061-70d8-466e-83b6-d91bcc0101d2 {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 984.145295] env[63345]: DEBUG oslo_vmware.api [None req-573572fd-d608-4c7c-a5a7-50f44cb4cd6a tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017608, 'name': PowerOffVM_Task, 'duration_secs': 0.344031} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.145608] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-573572fd-d608-4c7c-a5a7-50f44cb4cd6a tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 984.145896] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-573572fd-d608-4c7c-a5a7-50f44cb4cd6a tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 984.146179] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bdd32e3e-3914-4846-9abc-7cceade04738 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.226354] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-573572fd-d608-4c7c-a5a7-50f44cb4cd6a tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 984.226636] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-573572fd-d608-4c7c-a5a7-50f44cb4cd6a tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 984.226890] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-573572fd-d608-4c7c-a5a7-50f44cb4cd6a tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Deleting the datastore file [datastore2] 4868a0a0-ca35-44b0-a90c-124aa366af76 {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 984.227222] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-93d6d1ad-a5d3-4a20-a82c-eea2ec479dac {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.235602] env[63345]: DEBUG oslo_vmware.api [None req-573572fd-d608-4c7c-a5a7-50f44cb4cd6a tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 984.235602] env[63345]: value = "task-1017610" [ 984.235602] env[63345]: _type = "Task" [ 984.235602] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.246364] env[63345]: DEBUG oslo_vmware.api [None req-573572fd-d608-4c7c-a5a7-50f44cb4cd6a tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017610, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.251121] env[63345]: DEBUG oslo_vmware.api [None req-c8e84880-7ed8-4cdb-80fe-e1e030177dee tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Task: {'id': task-1017607, 'name': RemoveSnapshot_Task, 'duration_secs': 1.001481} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.251401] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-c8e84880-7ed8-4cdb-80fe-e1e030177dee tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] Deleted Snapshot of the VM instance {{(pid=63345) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 984.251632] env[63345]: INFO nova.compute.manager [None req-c8e84880-7ed8-4cdb-80fe-e1e030177dee tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] Took 16.91 seconds to snapshot the instance on the hypervisor. [ 984.289045] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-958f9d2f-620a-4f2b-a6c2-282bda8a46ca tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Creating Snapshot of the VM instance {{(pid=63345) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 984.289045] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-e0b736bd-4b55-4d27-81c3-e29394d4d70e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.296084] env[63345]: DEBUG oslo_vmware.api [None req-958f9d2f-620a-4f2b-a6c2-282bda8a46ca tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 984.296084] env[63345]: value = "task-1017611" [ 984.296084] env[63345]: _type = "Task" [ 984.296084] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.308111] env[63345]: DEBUG oslo_vmware.api [None req-958f9d2f-620a-4f2b-a6c2-282bda8a46ca tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017611, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.459806] env[63345]: DEBUG nova.compute.manager [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Starting instance... 
{{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 984.472488] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Releasing lock "refresh_cache-3d644f16-7924-4545-a528-1499a702d614" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 984.472802] env[63345]: DEBUG nova.compute.manager [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 3d644f16-7924-4545-a528-1499a702d614] Instance network_info: |[{"id": "15685195-e615-4386-8884-fbd1fd0e8221", "address": "fa:16:3e:3c:c7:f0", "network": {"id": "f05df594-fc76-4e2d-b29b-6942fee8dc99", "bridge": "br-int", "label": "tempest-ServersTestJSON-241206779-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63d7b3facae6416989f763e610cf98f7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15685195-e6", "ovs_interfaceid": "15685195-e615-4386-8884-fbd1fd0e8221", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 984.473317] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 3d644f16-7924-4545-a528-1499a702d614] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3c:c7:f0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7874ee7f-20c7-4bd8-a750-ed489e9acc65', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '15685195-e615-4386-8884-fbd1fd0e8221', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 984.481523] env[63345]: DEBUG oslo.service.loopingcall [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 984.481814] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3d644f16-7924-4545-a528-1499a702d614] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 984.482724] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1c195af2-6375-4526-af4b-f84b1c32a160 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.506050] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 984.506050] env[63345]: value = "task-1017612" [ 984.506050] env[63345]: _type = "Task" [ 984.506050] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.514793] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017612, 'name': CreateVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.538134] env[63345]: DEBUG nova.compute.manager [req-cfd03446-1b86-42b6-9eaa-844be434257a req-2c554948-ef56-4116-b693-484e0136ef04 service nova] [instance: 3d644f16-7924-4545-a528-1499a702d614] Received event network-changed-15685195-e615-4386-8884-fbd1fd0e8221 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 984.538403] env[63345]: DEBUG nova.compute.manager [req-cfd03446-1b86-42b6-9eaa-844be434257a req-2c554948-ef56-4116-b693-484e0136ef04 service nova] [instance: 3d644f16-7924-4545-a528-1499a702d614] Refreshing instance network info cache due to event network-changed-15685195-e615-4386-8884-fbd1fd0e8221. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 984.538710] env[63345]: DEBUG oslo_concurrency.lockutils [req-cfd03446-1b86-42b6-9eaa-844be434257a req-2c554948-ef56-4116-b693-484e0136ef04 service nova] Acquiring lock "refresh_cache-3d644f16-7924-4545-a528-1499a702d614" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 984.538899] env[63345]: DEBUG oslo_concurrency.lockutils [req-cfd03446-1b86-42b6-9eaa-844be434257a req-2c554948-ef56-4116-b693-484e0136ef04 service nova] Acquired lock "refresh_cache-3d644f16-7924-4545-a528-1499a702d614" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 984.539150] env[63345]: DEBUG nova.network.neutron [req-cfd03446-1b86-42b6-9eaa-844be434257a req-2c554948-ef56-4116-b693-484e0136ef04 service nova] [instance: 3d644f16-7924-4545-a528-1499a702d614] Refreshing network info cache for port 15685195-e615-4386-8884-fbd1fd0e8221 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 984.550438] env[63345]: DEBUG nova.scheduler.client.report [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} 
{{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 984.640534] env[63345]: DEBUG oslo_concurrency.lockutils [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquiring lock "refresh_cache-a8321259-b3a6-4e87-b13a-b964cf0dd766" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 984.640764] env[63345]: DEBUG oslo_concurrency.lockutils [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquired lock "refresh_cache-a8321259-b3a6-4e87-b13a-b964cf0dd766" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 984.640977] env[63345]: DEBUG nova.network.neutron [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: a8321259-b3a6-4e87-b13a-b964cf0dd766] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 984.747921] env[63345]: DEBUG oslo_vmware.api [None req-573572fd-d608-4c7c-a5a7-50f44cb4cd6a tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017610, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.226753} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.748203] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-573572fd-d608-4c7c-a5a7-50f44cb4cd6a tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 984.748506] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-573572fd-d608-4c7c-a5a7-50f44cb4cd6a tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 984.748577] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-573572fd-d608-4c7c-a5a7-50f44cb4cd6a tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 984.748743] env[63345]: INFO nova.compute.manager [None req-573572fd-d608-4c7c-a5a7-50f44cb4cd6a tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Took 1.14 seconds to destroy the instance on the hypervisor. [ 984.748982] env[63345]: DEBUG oslo.service.loopingcall [None req-573572fd-d608-4c7c-a5a7-50f44cb4cd6a tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 984.749194] env[63345]: DEBUG nova.compute.manager [-] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 984.749317] env[63345]: DEBUG nova.network.neutron [-] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 984.806780] env[63345]: DEBUG oslo_vmware.api [None req-958f9d2f-620a-4f2b-a6c2-282bda8a46ca tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017611, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.989578] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 985.018594] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017612, 'name': CreateVM_Task, 'duration_secs': 0.422585} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.018989] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3d644f16-7924-4545-a528-1499a702d614] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 985.020334] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 985.022635] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 985.022635] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 985.022635] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5c9d6eb8-1242-476b-ab76-930fb5dcad42 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.028910] env[63345]: DEBUG oslo_vmware.api [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Waiting for the task: (returnval){ [ 985.028910] env[63345]: value = 
"session[52090a46-d3fa-1435-f12f-c4737ae78030]52804940-62f4-aaf6-1d08-dc5d75cd2e89" [ 985.028910] env[63345]: _type = "Task" [ 985.028910] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.039856] env[63345]: DEBUG oslo_vmware.api [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52804940-62f4-aaf6-1d08-dc5d75cd2e89, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.055892] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.416s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 985.058025] env[63345]: DEBUG nova.compute.manager [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] [instance: 3d1e47c5-7e8c-417c-8c7c-009db666d391] Start building networks asynchronously for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 985.059176] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.070s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 985.062030] env[63345]: INFO nova.compute.claims [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 985.203412] env[63345]: DEBUG nova.network.neutron [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: a8321259-b3a6-4e87-b13a-b964cf0dd766] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 985.316766] env[63345]: DEBUG oslo_vmware.api [None req-958f9d2f-620a-4f2b-a6c2-282bda8a46ca tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017611, 'name': CreateSnapshot_Task, 'duration_secs': 0.814779} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.327191] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-958f9d2f-620a-4f2b-a6c2-282bda8a46ca tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Created Snapshot of the VM instance {{(pid=63345) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 985.327905] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e81e84cf-f071-4ae5-bc27-3332ed7631b1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.509206] env[63345]: DEBUG nova.network.neutron [req-cfd03446-1b86-42b6-9eaa-844be434257a req-2c554948-ef56-4116-b693-484e0136ef04 service nova] [instance: 3d644f16-7924-4545-a528-1499a702d614] Updated VIF entry in instance network info cache for port 15685195-e615-4386-8884-fbd1fd0e8221. {{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 985.509206] env[63345]: DEBUG nova.network.neutron [req-cfd03446-1b86-42b6-9eaa-844be434257a req-2c554948-ef56-4116-b693-484e0136ef04 service nova] [instance: 3d644f16-7924-4545-a528-1499a702d614] Updating instance_info_cache with network_info: [{"id": "15685195-e615-4386-8884-fbd1fd0e8221", "address": "fa:16:3e:3c:c7:f0", "network": {"id": "f05df594-fc76-4e2d-b29b-6942fee8dc99", "bridge": "br-int", "label": "tempest-ServersTestJSON-241206779-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63d7b3facae6416989f763e610cf98f7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15685195-e6", "ovs_interfaceid": "15685195-e615-4386-8884-fbd1fd0e8221", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 985.511288] env[63345]: DEBUG nova.network.neutron [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: a8321259-b3a6-4e87-b13a-b964cf0dd766] Updating instance_info_cache with network_info: [{"id": "8aa59061-70d8-466e-83b6-d91bcc0101d2", "address": "fa:16:3e:e1:71:5f", "network": {"id": "80bb8388-e130-46af-a4fc-1daea51d1bf5", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1343573007-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": 
"964cee117b3c4601b3afe82a8bb9c23e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ddfb706a-add1-4e16-9ac4-d20b16a1df6d", "external-id": "nsx-vlan-transportzone-820", "segmentation_id": 820, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8aa59061-70", "ovs_interfaceid": "8aa59061-70d8-466e-83b6-d91bcc0101d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 985.541541] env[63345]: DEBUG oslo_vmware.api [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52804940-62f4-aaf6-1d08-dc5d75cd2e89, 'name': SearchDatastore_Task, 'duration_secs': 0.011796} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.541935] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 985.542746] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 3d644f16-7924-4545-a528-1499a702d614] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 985.543058] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 985.543573] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 985.543573] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 985.543673] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-07b5ae53-0b72-46a8-aef9-1c7a621b14f0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.553103] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] 
Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 985.553318] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 985.554053] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5b2b66ea-fdb8-4a3a-9232-5b35e484f394 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.559851] env[63345]: DEBUG oslo_vmware.api [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Waiting for the task: (returnval){ [ 985.559851] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52c3f8c6-f8e4-94cb-2d54-e66039909369" [ 985.559851] env[63345]: _type = "Task" [ 985.559851] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.567411] env[63345]: DEBUG nova.compute.utils [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 985.575080] env[63345]: DEBUG nova.compute.manager [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] [instance: 3d1e47c5-7e8c-417c-8c7c-009db666d391] Allocating IP information in the background. {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 985.575268] env[63345]: DEBUG nova.network.neutron [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] [instance: 3d1e47c5-7e8c-417c-8c7c-009db666d391] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 985.577053] env[63345]: DEBUG oslo_vmware.api [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52c3f8c6-f8e4-94cb-2d54-e66039909369, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.622417] env[63345]: DEBUG nova.policy [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'eebecf7a13434b209ba5e86c5a40cd22', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '411cc504a8224ba581c36e04950875e9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 985.851393] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-958f9d2f-620a-4f2b-a6c2-282bda8a46ca tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Creating linked-clone VM from snapshot {{(pid=63345) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 985.851727] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-ad2c0732-f485-4f18-95fd-ce3ee10afd61 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.860658] env[63345]: DEBUG oslo_vmware.api [None req-958f9d2f-620a-4f2b-a6c2-282bda8a46ca tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 985.860658] env[63345]: value = "task-1017613" [ 985.860658] env[63345]: _type = "Task" [ 985.860658] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.868846] env[63345]: DEBUG oslo_vmware.api [None req-958f9d2f-620a-4f2b-a6c2-282bda8a46ca tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017613, 'name': CloneVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.962589] env[63345]: DEBUG nova.network.neutron [-] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 985.991020] env[63345]: DEBUG nova.network.neutron [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] [instance: 3d1e47c5-7e8c-417c-8c7c-009db666d391] Successfully created port: 4dd934da-ef10-4472-8c9e-60266db841a4 {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 986.011255] env[63345]: DEBUG oslo_concurrency.lockutils [req-cfd03446-1b86-42b6-9eaa-844be434257a req-2c554948-ef56-4116-b693-484e0136ef04 service nova] Releasing lock "refresh_cache-3d644f16-7924-4545-a528-1499a702d614" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 986.011545] env[63345]: DEBUG nova.compute.manager [req-cfd03446-1b86-42b6-9eaa-844be434257a req-2c554948-ef56-4116-b693-484e0136ef04 service nova] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Received event network-changed-8a3e5f64-f812-4c1b-a9e0-b8b3146a1467 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 986.011723] env[63345]: DEBUG nova.compute.manager [req-cfd03446-1b86-42b6-9eaa-844be434257a req-2c554948-ef56-4116-b693-484e0136ef04 service nova] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Refreshing instance network info cache due to event network-changed-8a3e5f64-f812-4c1b-a9e0-b8b3146a1467. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 986.011943] env[63345]: DEBUG oslo_concurrency.lockutils [req-cfd03446-1b86-42b6-9eaa-844be434257a req-2c554948-ef56-4116-b693-484e0136ef04 service nova] Acquiring lock "refresh_cache-dd624e54-bd5b-4660-88a1-9d6f36560421" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 986.012354] env[63345]: DEBUG oslo_concurrency.lockutils [req-cfd03446-1b86-42b6-9eaa-844be434257a req-2c554948-ef56-4116-b693-484e0136ef04 service nova] Acquired lock "refresh_cache-dd624e54-bd5b-4660-88a1-9d6f36560421" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 986.012354] env[63345]: DEBUG nova.network.neutron [req-cfd03446-1b86-42b6-9eaa-844be434257a req-2c554948-ef56-4116-b693-484e0136ef04 service nova] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Refreshing network info cache for port 8a3e5f64-f812-4c1b-a9e0-b8b3146a1467 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 986.014880] env[63345]: DEBUG oslo_concurrency.lockutils [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Releasing lock "refresh_cache-a8321259-b3a6-4e87-b13a-b964cf0dd766" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 986.015200] env[63345]: DEBUG nova.compute.manager [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: a8321259-b3a6-4e87-b13a-b964cf0dd766] Instance network_info: |[{"id": 
"8aa59061-70d8-466e-83b6-d91bcc0101d2", "address": "fa:16:3e:e1:71:5f", "network": {"id": "80bb8388-e130-46af-a4fc-1daea51d1bf5", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1343573007-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "964cee117b3c4601b3afe82a8bb9c23e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ddfb706a-add1-4e16-9ac4-d20b16a1df6d", "external-id": "nsx-vlan-transportzone-820", "segmentation_id": 820, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8aa59061-70", "ovs_interfaceid": "8aa59061-70d8-466e-83b6-d91bcc0101d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 986.015816] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: a8321259-b3a6-4e87-b13a-b964cf0dd766] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e1:71:5f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ddfb706a-add1-4e16-9ac4-d20b16a1df6d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8aa59061-70d8-466e-83b6-d91bcc0101d2', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 986.024132] env[63345]: DEBUG oslo.service.loopingcall [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 986.024349] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a8321259-b3a6-4e87-b13a-b964cf0dd766] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 986.024580] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-386c1694-cc61-4e46-acb1-64838035a13f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.045955] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 986.045955] env[63345]: value = "task-1017614" [ 986.045955] env[63345]: _type = "Task" [ 986.045955] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.054101] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017614, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.071144] env[63345]: DEBUG oslo_vmware.api [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52c3f8c6-f8e4-94cb-2d54-e66039909369, 'name': SearchDatastore_Task, 'duration_secs': 0.02978} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.074105] env[63345]: DEBUG nova.compute.manager [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] [instance: 3d1e47c5-7e8c-417c-8c7c-009db666d391] Start building block device mappings for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 986.077877] env[63345]: DEBUG nova.compute.manager [req-04461207-130f-4d48-9ba6-af1419a17f69 req-8ee0dc7f-691c-434e-90ef-cea99c6583cb service nova] [instance: a8321259-b3a6-4e87-b13a-b964cf0dd766] Received event network-changed-8aa59061-70d8-466e-83b6-d91bcc0101d2 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 986.078252] env[63345]: DEBUG nova.compute.manager [req-04461207-130f-4d48-9ba6-af1419a17f69 req-8ee0dc7f-691c-434e-90ef-cea99c6583cb service nova] [instance: a8321259-b3a6-4e87-b13a-b964cf0dd766] Refreshing instance network info cache due to event network-changed-8aa59061-70d8-466e-83b6-d91bcc0101d2. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 986.078622] env[63345]: DEBUG oslo_concurrency.lockutils [req-04461207-130f-4d48-9ba6-af1419a17f69 req-8ee0dc7f-691c-434e-90ef-cea99c6583cb service nova] Acquiring lock "refresh_cache-a8321259-b3a6-4e87-b13a-b964cf0dd766" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 986.078883] env[63345]: DEBUG oslo_concurrency.lockutils [req-04461207-130f-4d48-9ba6-af1419a17f69 req-8ee0dc7f-691c-434e-90ef-cea99c6583cb service nova] Acquired lock "refresh_cache-a8321259-b3a6-4e87-b13a-b964cf0dd766" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 986.079191] env[63345]: DEBUG nova.network.neutron [req-04461207-130f-4d48-9ba6-af1419a17f69 req-8ee0dc7f-691c-434e-90ef-cea99c6583cb service nova] [instance: a8321259-b3a6-4e87-b13a-b964cf0dd766] Refreshing network info cache for port 8aa59061-70d8-466e-83b6-d91bcc0101d2 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 986.080865] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a662aa45-ac01-470b-ac6c-1244b8fb17c8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.091408] env[63345]: DEBUG oslo_vmware.api [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Waiting for the task: (returnval){ [ 986.091408] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52c6e0ec-c3e9-433d-3a14-85a03e7f62c8" [ 986.091408] env[63345]: _type = "Task" [ 986.091408] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.101116] env[63345]: DEBUG oslo_vmware.api [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52c6e0ec-c3e9-433d-3a14-85a03e7f62c8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.296941] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 986.297246] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 986.315667] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-068789b1-0569-4300-b79d-739e4acb0f4e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.324459] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98d0aced-7c01-4b6a-8c3d-faae76ea5398 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.357183] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20d2efdc-6981-4f13-8c04-00764faf8268 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.368236] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d082e12f-98c4-4678-aa38-f6679571bd53 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.375231] env[63345]: DEBUG oslo_vmware.api [None req-958f9d2f-620a-4f2b-a6c2-282bda8a46ca tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017613, 'name': CloneVM_Task} progress is 94%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.385478] env[63345]: DEBUG nova.compute.provider_tree [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 986.469028] env[63345]: INFO nova.compute.manager [-] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Took 1.72 seconds to deallocate network for instance. 
[ 986.558410] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017614, 'name': CreateVM_Task, 'duration_secs': 0.506163} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.559227] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a8321259-b3a6-4e87-b13a-b964cf0dd766] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 986.559525] env[63345]: DEBUG oslo_concurrency.lockutils [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 986.559731] env[63345]: DEBUG oslo_concurrency.lockutils [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 986.560179] env[63345]: DEBUG oslo_concurrency.lockutils [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 986.560385] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b68ea91f-1208-49a7-9c60-7ac9530c5a83 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.565416] env[63345]: DEBUG oslo_vmware.api [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Waiting for the task: (returnval){ [ 986.565416] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]522b6068-d182-2ba3-6627-a3c2c2480438" [ 986.565416] env[63345]: _type = "Task" [ 986.565416] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.573240] env[63345]: DEBUG oslo_vmware.api [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]522b6068-d182-2ba3-6627-a3c2c2480438, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.601498] env[63345]: DEBUG oslo_vmware.api [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52c6e0ec-c3e9-433d-3a14-85a03e7f62c8, 'name': SearchDatastore_Task, 'duration_secs': 0.02353} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.601767] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 986.602067] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 3d644f16-7924-4545-a528-1499a702d614/3d644f16-7924-4545-a528-1499a702d614.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 986.602360] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-98adb2f6-d24e-49c1-b6e0-c52978e63ef0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.609140] env[63345]: DEBUG oslo_vmware.api [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Waiting for the task: (returnval){ [ 986.609140] env[63345]: value = "task-1017615" [ 986.609140] env[63345]: _type = "Task" [ 986.609140] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.624178] env[63345]: DEBUG oslo_vmware.api [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017615, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.671695] env[63345]: DEBUG oslo_vmware.rw_handles [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5228ac55-e3cb-06cc-56b2-25afbf4d9fe6/disk-0.vmdk. {{(pid=63345) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 986.672776] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6b31c62-29b0-4d9e-84fe-02586af2d294 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.683062] env[63345]: DEBUG oslo_vmware.rw_handles [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5228ac55-e3cb-06cc-56b2-25afbf4d9fe6/disk-0.vmdk is in state: ready. 
{{(pid=63345) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 986.683261] env[63345]: ERROR oslo_vmware.rw_handles [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5228ac55-e3cb-06cc-56b2-25afbf4d9fe6/disk-0.vmdk due to incomplete transfer. [ 986.683509] env[63345]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-4083b2d4-e4c0-4691-9fff-30d37e26dca7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.688700] env[63345]: DEBUG nova.compute.manager [None req-abe292e0-35fe-4a5b-b16a-a911f1062318 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 986.689653] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1199158a-4a02-4c27-9d9d-c918d666218f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.693237] env[63345]: DEBUG oslo_vmware.rw_handles [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5228ac55-e3cb-06cc-56b2-25afbf4d9fe6/disk-0.vmdk. {{(pid=63345) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 986.693416] env[63345]: DEBUG nova.virt.vmwareapi.images [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 732ac30c-15c1-4c57-bb70-ea3ed51f646b] Uploaded image ed99e277-f7a0-4bd8-af6b-7b9bada72cca to the Glance image server {{(pid=63345) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:473}} [ 986.695181] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 732ac30c-15c1-4c57-bb70-ea3ed51f646b] Destroying the VM {{(pid=63345) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 986.695843] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-548dcf41-d2ca-4c18-bd9b-d5abba319080 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.713994] env[63345]: DEBUG oslo_vmware.api [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for the task: (returnval){ [ 986.713994] env[63345]: value = "task-1017616" [ 986.713994] env[63345]: _type = "Task" [ 986.713994] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.727199] env[63345]: DEBUG oslo_vmware.api [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017616, 'name': Destroy_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.807784] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 986.807950] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Starting heal instance info cache {{(pid=63345) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10257}} [ 986.825624] env[63345]: DEBUG nova.network.neutron [req-04461207-130f-4d48-9ba6-af1419a17f69 req-8ee0dc7f-691c-434e-90ef-cea99c6583cb service nova] [instance: a8321259-b3a6-4e87-b13a-b964cf0dd766] Updated VIF entry in instance network info cache for port 8aa59061-70d8-466e-83b6-d91bcc0101d2. {{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 986.826064] env[63345]: DEBUG nova.network.neutron [req-04461207-130f-4d48-9ba6-af1419a17f69 req-8ee0dc7f-691c-434e-90ef-cea99c6583cb service nova] [instance: a8321259-b3a6-4e87-b13a-b964cf0dd766] Updating instance_info_cache with network_info: [{"id": "8aa59061-70d8-466e-83b6-d91bcc0101d2", "address": "fa:16:3e:e1:71:5f", "network": {"id": "80bb8388-e130-46af-a4fc-1daea51d1bf5", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1343573007-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "964cee117b3c4601b3afe82a8bb9c23e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ddfb706a-add1-4e16-9ac4-d20b16a1df6d", "external-id": "nsx-vlan-transportzone-820", "segmentation_id": 820, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8aa59061-70", "ovs_interfaceid": "8aa59061-70d8-466e-83b6-d91bcc0101d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 986.873141] env[63345]: DEBUG oslo_vmware.api [None req-958f9d2f-620a-4f2b-a6c2-282bda8a46ca tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017613, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.914768] env[63345]: ERROR nova.scheduler.client.report [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [req-2466c148-3c82-494a-9cbd-99c20fed96ca] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID fc35ddde-c15e-4ab8-bf77-a06ae0805b57. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-2466c148-3c82-494a-9cbd-99c20fed96ca"}]} [ 986.937088] env[63345]: DEBUG nova.scheduler.client.report [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Refreshing inventories for resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 986.957253] env[63345]: DEBUG nova.scheduler.client.report [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Updating ProviderTree inventory for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 986.957567] env[63345]: DEBUG nova.compute.provider_tree [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 986.977225] env[63345]: DEBUG nova.scheduler.client.report [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Refreshing aggregate associations for resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57, aggregates: None {{(pid=63345) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 986.980307] env[63345]: DEBUG oslo_concurrency.lockutils [None req-573572fd-d608-4c7c-a5a7-50f44cb4cd6a tempest-ServerActionsTestOtherA-316720793 
tempest-ServerActionsTestOtherA-316720793-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 987.003724] env[63345]: DEBUG nova.scheduler.client.report [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Refreshing trait associations for resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=63345) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 987.078795] env[63345]: DEBUG oslo_vmware.api [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]522b6068-d182-2ba3-6627-a3c2c2480438, 'name': SearchDatastore_Task, 'duration_secs': 0.038254} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.082337] env[63345]: DEBUG oslo_concurrency.lockutils [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 987.082669] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: a8321259-b3a6-4e87-b13a-b964cf0dd766] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 987.082958] env[63345]: DEBUG oslo_concurrency.lockutils [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 987.083171] env[63345]: DEBUG oslo_concurrency.lockutils [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 987.083410] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 987.085046] env[63345]: DEBUG nova.compute.manager [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] [instance: 
3d1e47c5-7e8c-417c-8c7c-009db666d391] Start spawning the instance on the hypervisor. {{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 987.087036] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-21ebd11d-d2cc-45e5-84a5-461bf9a17a75 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.105760] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 987.105760] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 987.106415] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d8cd7a22-7d66-4aa9-a69e-9cc98b6632d6 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.113440] env[63345]: DEBUG nova.virt.hardware [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 987.113440] env[63345]: DEBUG nova.virt.hardware [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 987.113440] env[63345]: DEBUG nova.virt.hardware [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 987.113440] env[63345]: DEBUG nova.virt.hardware [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 987.113732] env[63345]: DEBUG nova.virt.hardware [None 
req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 987.113732] env[63345]: DEBUG nova.virt.hardware [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 987.114011] env[63345]: DEBUG nova.virt.hardware [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 987.114215] env[63345]: DEBUG nova.virt.hardware [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 987.114424] env[63345]: DEBUG nova.virt.hardware [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 987.114608] env[63345]: DEBUG nova.virt.hardware [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 987.114865] env[63345]: DEBUG nova.virt.hardware [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 987.116247] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0249508e-3405-4f8b-b475-1bc670f85bf7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.126146] env[63345]: DEBUG oslo_vmware.api [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Waiting for the task: (returnval){ [ 987.126146] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52c4ce5d-fb04-9870-f9fb-fb51ce405109" [ 987.126146] env[63345]: _type = "Task" [ 987.126146] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.131397] env[63345]: DEBUG nova.network.neutron [req-cfd03446-1b86-42b6-9eaa-844be434257a req-2c554948-ef56-4116-b693-484e0136ef04 service nova] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Updated VIF entry in instance network info cache for port 8a3e5f64-f812-4c1b-a9e0-b8b3146a1467. {{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 987.131652] env[63345]: DEBUG nova.network.neutron [req-cfd03446-1b86-42b6-9eaa-844be434257a req-2c554948-ef56-4116-b693-484e0136ef04 service nova] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Updating instance_info_cache with network_info: [{"id": "8a3e5f64-f812-4c1b-a9e0-b8b3146a1467", "address": "fa:16:3e:55:5a:7c", "network": {"id": "b360ab0d-3deb-4632-a8d5-c1639db9e9e2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2015660260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.225", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33c28bfca4da460e8ca96dc7519204c8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f35e69ef-c2c8-4b8c-9887-33e97b242c0a", "external-id": "nsx-vlan-transportzone-969", "segmentation_id": 969, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8a3e5f64-f8", "ovs_interfaceid": "8a3e5f64-f812-4c1b-a9e0-b8b3146a1467", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 987.139061] env[63345]: DEBUG oslo_vmware.api [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017615, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.144885] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01ffaf44-2d18-431c-a880-a9dcf6a0b4a5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.154335] env[63345]: DEBUG oslo_vmware.api [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52c4ce5d-fb04-9870-f9fb-fb51ce405109, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.208889] env[63345]: INFO nova.compute.manager [None req-abe292e0-35fe-4a5b-b16a-a911f1062318 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] instance snapshotting [ 987.212190] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb6bb24d-8585-4deb-a5dd-fb1533b32b7d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.223807] env[63345]: DEBUG oslo_vmware.api [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017616, 'name': Destroy_Task} progress is 33%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.242263] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f915740c-1bd6-4a48-a4e4-7fec9094cef7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.264638] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0224162e-0035-43ed-9dcc-fc19cea324af {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.272371] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95b0205e-83c5-4009-b25f-577a537db396 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.303280] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e2e0896-d4fb-4937-af64-a68c17ea51fd {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.316849] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b9d3cc7-d337-4926-94cf-3af6b59cd341 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.329609] env[63345]: DEBUG oslo_concurrency.lockutils [req-04461207-130f-4d48-9ba6-af1419a17f69 req-8ee0dc7f-691c-434e-90ef-cea99c6583cb service nova] Releasing lock "refresh_cache-a8321259-b3a6-4e87-b13a-b964cf0dd766" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 987.329883] env[63345]: DEBUG nova.compute.manager [req-04461207-130f-4d48-9ba6-af1419a17f69 req-8ee0dc7f-691c-434e-90ef-cea99c6583cb service nova] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Received event network-vif-deleted-277d5619-4a4c-4f02-9ce7-786f57c7dc46 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 987.330468] env[63345]: DEBUG nova.compute.provider_tree [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 987.372552] env[63345]: DEBUG oslo_vmware.api [None req-958f9d2f-620a-4f2b-a6c2-282bda8a46ca 
tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017613, 'name': CloneVM_Task} progress is 95%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.508028] env[63345]: DEBUG nova.compute.manager [req-7a5cfa05-659b-4f2d-ac5b-14e36f3246df req-179c37d7-7f2b-4af9-85a3-56df6f87057a service nova] [instance: 3d1e47c5-7e8c-417c-8c7c-009db666d391] Received event network-vif-plugged-4dd934da-ef10-4472-8c9e-60266db841a4 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 987.508149] env[63345]: DEBUG oslo_concurrency.lockutils [req-7a5cfa05-659b-4f2d-ac5b-14e36f3246df req-179c37d7-7f2b-4af9-85a3-56df6f87057a service nova] Acquiring lock "3d1e47c5-7e8c-417c-8c7c-009db666d391-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 987.508377] env[63345]: DEBUG oslo_concurrency.lockutils [req-7a5cfa05-659b-4f2d-ac5b-14e36f3246df req-179c37d7-7f2b-4af9-85a3-56df6f87057a service nova] Lock "3d1e47c5-7e8c-417c-8c7c-009db666d391-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 987.508557] env[63345]: DEBUG oslo_concurrency.lockutils [req-7a5cfa05-659b-4f2d-ac5b-14e36f3246df req-179c37d7-7f2b-4af9-85a3-56df6f87057a service nova] Lock "3d1e47c5-7e8c-417c-8c7c-009db666d391-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 987.508754] env[63345]: DEBUG nova.compute.manager [req-7a5cfa05-659b-4f2d-ac5b-14e36f3246df req-179c37d7-7f2b-4af9-85a3-56df6f87057a service nova] [instance: 3d1e47c5-7e8c-417c-8c7c-009db666d391] No waiting events found dispatching network-vif-plugged-4dd934da-ef10-4472-8c9e-60266db841a4 {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 987.508944] env[63345]: WARNING nova.compute.manager [req-7a5cfa05-659b-4f2d-ac5b-14e36f3246df req-179c37d7-7f2b-4af9-85a3-56df6f87057a service nova] [instance: 3d1e47c5-7e8c-417c-8c7c-009db666d391] Received unexpected event network-vif-plugged-4dd934da-ef10-4472-8c9e-60266db841a4 for instance with vm_state building and task_state spawning. [ 987.630479] env[63345]: DEBUG oslo_vmware.api [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017615, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.604957} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.636589] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 3d644f16-7924-4545-a528-1499a702d614/3d644f16-7924-4545-a528-1499a702d614.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 987.636980] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 3d644f16-7924-4545-a528-1499a702d614] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 987.638224] env[63345]: DEBUG nova.network.neutron [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] [instance: 3d1e47c5-7e8c-417c-8c7c-009db666d391] Successfully updated port: 4dd934da-ef10-4472-8c9e-60266db841a4 {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 987.640433] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b003d893-91d2-431b-a707-fc53e180a6b0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.645517] env[63345]: DEBUG oslo_concurrency.lockutils [req-cfd03446-1b86-42b6-9eaa-844be434257a req-2c554948-ef56-4116-b693-484e0136ef04 service nova] Releasing lock "refresh_cache-dd624e54-bd5b-4660-88a1-9d6f36560421" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 987.652756] env[63345]: DEBUG oslo_vmware.api [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52c4ce5d-fb04-9870-f9fb-fb51ce405109, 'name': SearchDatastore_Task, 'duration_secs': 0.05341} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.655909] env[63345]: DEBUG oslo_vmware.api [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Waiting for the task: (returnval){ [ 987.655909] env[63345]: value = "task-1017617" [ 987.655909] env[63345]: _type = "Task" [ 987.655909] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.656126] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-93f73c5f-9fee-4114-8183-6ff0757c1244 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.667779] env[63345]: DEBUG oslo_vmware.api [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Waiting for the task: (returnval){ [ 987.667779] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]524335dd-ecdd-2482-2c09-ed5d6f358d4f" [ 987.667779] env[63345]: _type = "Task" [ 987.667779] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.671242] env[63345]: DEBUG oslo_vmware.api [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017617, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.679076] env[63345]: DEBUG oslo_vmware.api [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]524335dd-ecdd-2482-2c09-ed5d6f358d4f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.727079] env[63345]: DEBUG oslo_vmware.api [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017616, 'name': Destroy_Task, 'duration_secs': 0.609656} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.727522] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 732ac30c-15c1-4c57-bb70-ea3ed51f646b] Destroyed the VM [ 987.727776] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 732ac30c-15c1-4c57-bb70-ea3ed51f646b] Deleting Snapshot of the VM instance {{(pid=63345) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 987.728386] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-2e5304f3-eef3-41a6-9fc0-7b574426d4cd {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.735656] env[63345]: DEBUG oslo_vmware.api [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for the task: (returnval){ [ 987.735656] env[63345]: value = "task-1017618" [ 987.735656] env[63345]: _type = "Task" [ 987.735656] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.744057] env[63345]: DEBUG oslo_vmware.api [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017618, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.752980] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-abe292e0-35fe-4a5b-b16a-a911f1062318 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] Creating Snapshot of the VM instance {{(pid=63345) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 987.752980] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-626d0a01-6e00-4e73-ac2d-33e6e6aef9e5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.760089] env[63345]: DEBUG oslo_vmware.api [None req-abe292e0-35fe-4a5b-b16a-a911f1062318 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Waiting for the task: (returnval){ [ 987.760089] env[63345]: value = "task-1017619" [ 987.760089] env[63345]: _type = "Task" [ 987.760089] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.768213] env[63345]: DEBUG oslo_vmware.api [None req-abe292e0-35fe-4a5b-b16a-a911f1062318 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Task: {'id': task-1017619, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.834466] env[63345]: DEBUG nova.scheduler.client.report [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 987.872856] env[63345]: DEBUG oslo_vmware.api [None req-958f9d2f-620a-4f2b-a6c2-282bda8a46ca tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017613, 'name': CloneVM_Task, 'duration_secs': 2.001216} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.873255] env[63345]: INFO nova.virt.vmwareapi.vmops [None req-958f9d2f-620a-4f2b-a6c2-282bda8a46ca tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Created linked-clone VM from snapshot [ 987.874129] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfbd68ba-4085-4a1b-9747-3f95cdd50c13 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.881689] env[63345]: DEBUG nova.virt.vmwareapi.images [None req-958f9d2f-620a-4f2b-a6c2-282bda8a46ca tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Uploading image fdfac4b6-e678-4e72-af5e-15f172b83432 {{(pid=63345) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 987.907225] env[63345]: DEBUG oslo_vmware.rw_handles [None req-958f9d2f-620a-4f2b-a6c2-282bda8a46ca tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 987.907225] env[63345]: value = "vm-226132" [ 987.907225] env[63345]: _type = "VirtualMachine" [ 987.907225] env[63345]: }. {{(pid=63345) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 987.907507] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-c7804991-99e1-4c6b-a044-66f9f27febad {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.914185] env[63345]: DEBUG oslo_vmware.rw_handles [None req-958f9d2f-620a-4f2b-a6c2-282bda8a46ca tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lease: (returnval){ [ 987.914185] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52ebcc6c-20fd-0fac-91f3-909f0ceabcc7" [ 987.914185] env[63345]: _type = "HttpNfcLease" [ 987.914185] env[63345]: } obtained for exporting VM: (result){ [ 987.914185] env[63345]: value = "vm-226132" [ 987.914185] env[63345]: _type = "VirtualMachine" [ 987.914185] env[63345]: }. {{(pid=63345) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 987.914437] env[63345]: DEBUG oslo_vmware.api [None req-958f9d2f-620a-4f2b-a6c2-282bda8a46ca tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the lease: (returnval){ [ 987.914437] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52ebcc6c-20fd-0fac-91f3-909f0ceabcc7" [ 987.914437] env[63345]: _type = "HttpNfcLease" [ 987.914437] env[63345]: } to be ready. {{(pid=63345) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 987.922665] env[63345]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 987.922665] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52ebcc6c-20fd-0fac-91f3-909f0ceabcc7" [ 987.922665] env[63345]: _type = "HttpNfcLease" [ 987.922665] env[63345]: } is initializing. 
{{(pid=63345) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 988.141260] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Acquiring lock "refresh_cache-3d1e47c5-7e8c-417c-8c7c-009db666d391" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 988.141429] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Acquired lock "refresh_cache-3d1e47c5-7e8c-417c-8c7c-009db666d391" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 988.141587] env[63345]: DEBUG nova.network.neutron [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] [instance: 3d1e47c5-7e8c-417c-8c7c-009db666d391] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 988.168892] env[63345]: DEBUG oslo_vmware.api [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017617, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075539} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.169179] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 3d644f16-7924-4545-a528-1499a702d614] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 988.169974] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24f49870-914d-42de-9c1c-82562630269a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.181790] env[63345]: DEBUG oslo_vmware.api [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]524335dd-ecdd-2482-2c09-ed5d6f358d4f, 'name': SearchDatastore_Task, 'duration_secs': 0.01399} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.190685] env[63345]: DEBUG oslo_concurrency.lockutils [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 988.190843] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] a8321259-b3a6-4e87-b13a-b964cf0dd766/a8321259-b3a6-4e87-b13a-b964cf0dd766.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 988.200246] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 3d644f16-7924-4545-a528-1499a702d614] Reconfiguring VM instance instance-00000060 to attach disk [datastore2] 3d644f16-7924-4545-a528-1499a702d614/3d644f16-7924-4545-a528-1499a702d614.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 988.200482] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a96dd51c-7068-44b2-b80a-6ea9581e666a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.202793] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e09ff02a-4f83-44e6-9761-3361fc96986c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.223086] env[63345]: DEBUG oslo_vmware.api [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Waiting for the task: (returnval){ [ 988.223086] env[63345]: value = "task-1017621" [ 988.223086] env[63345]: _type = "Task" [ 988.223086] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.226096] env[63345]: DEBUG oslo_vmware.api [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Waiting for the task: (returnval){ [ 988.226096] env[63345]: value = "task-1017622" [ 988.226096] env[63345]: _type = "Task" [ 988.226096] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.237077] env[63345]: DEBUG oslo_vmware.api [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017621, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.243139] env[63345]: DEBUG oslo_vmware.api [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017622, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.250075] env[63345]: DEBUG oslo_vmware.api [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017618, 'name': RemoveSnapshot_Task, 'duration_secs': 0.478075} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.250376] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 732ac30c-15c1-4c57-bb70-ea3ed51f646b] Deleted Snapshot of the VM instance {{(pid=63345) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 988.250754] env[63345]: DEBUG nova.compute.manager [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 732ac30c-15c1-4c57-bb70-ea3ed51f646b] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 988.251683] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cc69fec-c872-42c9-a253-b8478c6aadba {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.269680] env[63345]: DEBUG oslo_vmware.api [None req-abe292e0-35fe-4a5b-b16a-a911f1062318 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Task: {'id': task-1017619, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.339733] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.280s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 988.340287] env[63345]: DEBUG nova.compute.manager [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Start building networks asynchronously for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 988.342857] env[63345]: DEBUG oslo_concurrency.lockutils [None req-573572fd-d608-4c7c-a5a7-50f44cb4cd6a tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.363s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 988.343560] env[63345]: DEBUG nova.objects.instance [None req-573572fd-d608-4c7c-a5a7-50f44cb4cd6a tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lazy-loading 'resources' on Instance uuid 4868a0a0-ca35-44b0-a90c-124aa366af76 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 988.422601] env[63345]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 988.422601] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52ebcc6c-20fd-0fac-91f3-909f0ceabcc7" [ 988.422601] env[63345]: _type = "HttpNfcLease" [ 988.422601] env[63345]: } is ready. {{(pid=63345) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 988.422899] env[63345]: DEBUG oslo_vmware.rw_handles [None req-958f9d2f-620a-4f2b-a6c2-282bda8a46ca tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 988.422899] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52ebcc6c-20fd-0fac-91f3-909f0ceabcc7" [ 988.422899] env[63345]: _type = "HttpNfcLease" [ 988.422899] env[63345]: }. {{(pid=63345) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 988.423654] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5da4980a-f99e-407d-b563-559db9d7d28b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.431113] env[63345]: DEBUG oslo_vmware.rw_handles [None req-958f9d2f-620a-4f2b-a6c2-282bda8a46ca tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521f398a-8ab7-676e-2f5f-026d1bb4fac4/disk-0.vmdk from lease info. {{(pid=63345) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 988.431295] env[63345]: DEBUG oslo_vmware.rw_handles [None req-958f9d2f-620a-4f2b-a6c2-282bda8a46ca tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521f398a-8ab7-676e-2f5f-026d1bb4fac4/disk-0.vmdk for reading. 
{{(pid=63345) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 988.664943] env[63345]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-6fe011b9-a46d-4413-8338-96d51e8b1e36 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.677825] env[63345]: DEBUG nova.network.neutron [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] [instance: 3d1e47c5-7e8c-417c-8c7c-009db666d391] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 988.738891] env[63345]: DEBUG oslo_vmware.api [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017621, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.742170] env[63345]: DEBUG oslo_vmware.api [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017622, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.767826] env[63345]: INFO nova.compute.manager [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 732ac30c-15c1-4c57-bb70-ea3ed51f646b] Shelve offloading [ 988.772596] env[63345]: DEBUG oslo_vmware.api [None req-abe292e0-35fe-4a5b-b16a-a911f1062318 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Task: {'id': task-1017619, 'name': CreateSnapshot_Task, 'duration_secs': 0.528872} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.773664] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-abe292e0-35fe-4a5b-b16a-a911f1062318 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] Created Snapshot of the VM instance {{(pid=63345) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 988.774569] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f20361b2-4335-4829-a8e7-051a20d0b110 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.846377] env[63345]: DEBUG nova.compute.utils [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 988.850399] env[63345]: DEBUG nova.compute.manager [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Allocating IP information in the background. 
{{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 988.850579] env[63345]: DEBUG nova.network.neutron [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 988.891994] env[63345]: DEBUG nova.network.neutron [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] [instance: 3d1e47c5-7e8c-417c-8c7c-009db666d391] Updating instance_info_cache with network_info: [{"id": "4dd934da-ef10-4472-8c9e-60266db841a4", "address": "fa:16:3e:07:03:7c", "network": {"id": "8fdab47e-3475-4058-955d-79800fbcaf44", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-527970309-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "411cc504a8224ba581c36e04950875e9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8039f411-8c97-48fe-a5a9-9f5a42e4e7c6", "external-id": "nsx-vlan-transportzone-12", "segmentation_id": 12, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4dd934da-ef", "ovs_interfaceid": "4dd934da-ef10-4472-8c9e-60266db841a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 988.905638] env[63345]: DEBUG nova.policy [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e36fd04030444217acadbbf4e4fe9be0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '33c28bfca4da460e8ca96dc7519204c8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 989.074164] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dd02c9a-fa88-4b5d-8f80-97a392bb9afe {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.087935] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-833c592f-5214-4573-bb14-dd7aa4000fb0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.137413] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e30e2e3a-43ab-49ea-b5aa-97c004221df1 {{(pid=63345) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.148041] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-998ba4ab-7d95-431c-aba2-f7522278aee9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.164421] env[63345]: DEBUG nova.compute.provider_tree [None req-573572fd-d608-4c7c-a5a7-50f44cb4cd6a tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 989.238837] env[63345]: DEBUG oslo_vmware.api [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017621, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.530659} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.242487] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] a8321259-b3a6-4e87-b13a-b964cf0dd766/a8321259-b3a6-4e87-b13a-b964cf0dd766.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 989.242877] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: a8321259-b3a6-4e87-b13a-b964cf0dd766] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 989.242994] env[63345]: DEBUG oslo_vmware.api [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017622, 'name': ReconfigVM_Task, 'duration_secs': 0.598783} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.243225] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fc1e4997-6c08-4aa0-ba09-77ec25856dc2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.249019] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 3d644f16-7924-4545-a528-1499a702d614] Reconfigured VM instance instance-00000060 to attach disk [datastore2] 3d644f16-7924-4545-a528-1499a702d614/3d644f16-7924-4545-a528-1499a702d614.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 989.249019] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4d49f472-2214-4cae-a2b3-ee8ce6206e68 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.254170] env[63345]: DEBUG oslo_vmware.api [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Waiting for the task: (returnval){ [ 989.254170] env[63345]: value = "task-1017624" [ 989.254170] env[63345]: _type = "Task" [ 989.254170] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.255738] env[63345]: DEBUG oslo_vmware.api [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Waiting for the task: (returnval){ [ 989.255738] env[63345]: value = "task-1017623" [ 989.255738] env[63345]: _type = "Task" [ 989.255738] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.271471] env[63345]: DEBUG oslo_vmware.api [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017624, 'name': Rename_Task} progress is 10%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.276142] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 732ac30c-15c1-4c57-bb70-ea3ed51f646b] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 989.276841] env[63345]: DEBUG oslo_vmware.api [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017623, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.278351] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-100d746f-915d-4ca2-9a8a-18603abfae56 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.285365] env[63345]: DEBUG oslo_vmware.api [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for the task: (returnval){ [ 989.285365] env[63345]: value = "task-1017625" [ 989.285365] env[63345]: _type = "Task" [ 989.285365] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.295205] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-abe292e0-35fe-4a5b-b16a-a911f1062318 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] Creating linked-clone VM from snapshot {{(pid=63345) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 989.295962] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-90c5932a-5789-4349-aec9-e58336fa367a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.307167] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 732ac30c-15c1-4c57-bb70-ea3ed51f646b] VM already powered off {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 989.307473] env[63345]: DEBUG nova.compute.manager [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 732ac30c-15c1-4c57-bb70-ea3ed51f646b] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 989.308241] env[63345]: DEBUG oslo_vmware.api [None req-abe292e0-35fe-4a5b-b16a-a911f1062318 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Waiting for the task: (returnval){ [ 989.308241] env[63345]: value = "task-1017626" [ 989.308241] env[63345]: _type = "Task" [ 989.308241] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.309831] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be5a4009-d53c-4f59-852d-c971f3000c70 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.317886] env[63345]: DEBUG nova.network.neutron [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Successfully created port: d6e5e759-86e1-4f76-9b65-19b2691780df {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 989.325751] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquiring lock "refresh_cache-732ac30c-15c1-4c57-bb70-ea3ed51f646b" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 989.325751] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquired lock "refresh_cache-732ac30c-15c1-4c57-bb70-ea3ed51f646b" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 989.325751] env[63345]: DEBUG nova.network.neutron [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 732ac30c-15c1-4c57-bb70-ea3ed51f646b] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 989.333382] env[63345]: DEBUG oslo_vmware.api [None req-abe292e0-35fe-4a5b-b16a-a911f1062318 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Task: {'id': task-1017626, 'name': CloneVM_Task} progress is 10%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.353604] env[63345]: DEBUG nova.compute.manager [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Start building block device mappings for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 989.394398] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Releasing lock "refresh_cache-3d1e47c5-7e8c-417c-8c7c-009db666d391" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 989.394740] env[63345]: DEBUG nova.compute.manager [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] [instance: 3d1e47c5-7e8c-417c-8c7c-009db666d391] Instance network_info: |[{"id": "4dd934da-ef10-4472-8c9e-60266db841a4", "address": "fa:16:3e:07:03:7c", "network": {"id": "8fdab47e-3475-4058-955d-79800fbcaf44", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-527970309-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "411cc504a8224ba581c36e04950875e9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8039f411-8c97-48fe-a5a9-9f5a42e4e7c6", "external-id": "nsx-vlan-transportzone-12", "segmentation_id": 12, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4dd934da-ef", "ovs_interfaceid": "4dd934da-ef10-4472-8c9e-60266db841a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 989.395289] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] [instance: 3d1e47c5-7e8c-417c-8c7c-009db666d391] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:07:03:7c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8039f411-8c97-48fe-a5a9-9f5a42e4e7c6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4dd934da-ef10-4472-8c9e-60266db841a4', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 989.407444] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Creating folder: Project (411cc504a8224ba581c36e04950875e9). Parent ref: group-v225918. 
{{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 989.409084] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e989aab8-c380-426b-a226-a286631588ee {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.420076] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Created folder: Project (411cc504a8224ba581c36e04950875e9) in parent group-v225918. [ 989.420356] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Creating folder: Instances. Parent ref: group-v226136. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 989.420611] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fa865694-f722-4d2b-a52e-de5eb45991c7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.432854] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Created folder: Instances in parent group-v226136. [ 989.433143] env[63345]: DEBUG oslo.service.loopingcall [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 989.433386] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3d1e47c5-7e8c-417c-8c7c-009db666d391] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 989.433618] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-889dca74-f1a4-4e09-a3e7-eba4a046a65e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.456224] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 989.456224] env[63345]: value = "task-1017629" [ 989.456224] env[63345]: _type = "Task" [ 989.456224] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.470528] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017629, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.642414] env[63345]: DEBUG nova.compute.manager [req-777cf3f6-2d8e-44f6-b466-134b863b265a req-8f485f86-7f68-47e3-87d4-be8f5703ea26 service nova] [instance: 3d1e47c5-7e8c-417c-8c7c-009db666d391] Received event network-changed-4dd934da-ef10-4472-8c9e-60266db841a4 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 989.642620] env[63345]: DEBUG nova.compute.manager [req-777cf3f6-2d8e-44f6-b466-134b863b265a req-8f485f86-7f68-47e3-87d4-be8f5703ea26 service nova] [instance: 3d1e47c5-7e8c-417c-8c7c-009db666d391] Refreshing instance network info cache due to event network-changed-4dd934da-ef10-4472-8c9e-60266db841a4. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 989.642856] env[63345]: DEBUG oslo_concurrency.lockutils [req-777cf3f6-2d8e-44f6-b466-134b863b265a req-8f485f86-7f68-47e3-87d4-be8f5703ea26 service nova] Acquiring lock "refresh_cache-3d1e47c5-7e8c-417c-8c7c-009db666d391" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 989.642985] env[63345]: DEBUG oslo_concurrency.lockutils [req-777cf3f6-2d8e-44f6-b466-134b863b265a req-8f485f86-7f68-47e3-87d4-be8f5703ea26 service nova] Acquired lock "refresh_cache-3d1e47c5-7e8c-417c-8c7c-009db666d391" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 989.643635] env[63345]: DEBUG nova.network.neutron [req-777cf3f6-2d8e-44f6-b466-134b863b265a req-8f485f86-7f68-47e3-87d4-be8f5703ea26 service nova] [instance: 3d1e47c5-7e8c-417c-8c7c-009db666d391] Refreshing network info cache for port 4dd934da-ef10-4472-8c9e-60266db841a4 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 989.667460] env[63345]: DEBUG nova.scheduler.client.report [None req-573572fd-d608-4c7c-a5a7-50f44cb4cd6a tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 989.767959] env[63345]: DEBUG oslo_vmware.api [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017624, 'name': Rename_Task, 'duration_secs': 0.21662} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.768895] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 3d644f16-7924-4545-a528-1499a702d614] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 989.769248] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0ecd3417-8004-4340-b97e-e739708569bb {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.774352] env[63345]: DEBUG oslo_vmware.api [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017623, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07901} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.775147] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: a8321259-b3a6-4e87-b13a-b964cf0dd766] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 989.776368] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abbf5b7a-54a7-4c53-b409-973ed9beb78a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.779950] env[63345]: DEBUG oslo_vmware.api [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Waiting for the task: (returnval){ [ 989.779950] env[63345]: value = "task-1017630" [ 989.779950] env[63345]: _type = "Task" [ 989.779950] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.803830] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: a8321259-b3a6-4e87-b13a-b964cf0dd766] Reconfiguring VM instance instance-00000061 to attach disk [datastore2] a8321259-b3a6-4e87-b13a-b964cf0dd766/a8321259-b3a6-4e87-b13a-b964cf0dd766.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 989.807210] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0ac19fd8-d7a5-4570-8dc6-404a9e3e93ee {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.824759] env[63345]: DEBUG oslo_vmware.api [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017630, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.841437] env[63345]: DEBUG oslo_vmware.api [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Waiting for the task: (returnval){ [ 989.841437] env[63345]: value = "task-1017631" [ 989.841437] env[63345]: _type = "Task" [ 989.841437] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.842240] env[63345]: DEBUG oslo_vmware.api [None req-abe292e0-35fe-4a5b-b16a-a911f1062318 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Task: {'id': task-1017626, 'name': CloneVM_Task} progress is 94%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.854057] env[63345]: DEBUG oslo_vmware.api [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017631, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.966931] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017629, 'name': CreateVM_Task, 'duration_secs': 0.50573} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.967326] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3d1e47c5-7e8c-417c-8c7c-009db666d391] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 989.970808] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 989.970808] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 989.970808] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 989.970808] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e42ff141-4bc9-4b3d-9659-978ed34fd74b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.976396] env[63345]: DEBUG oslo_vmware.api [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] 
Waiting for the task: (returnval){ [ 989.976396] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52733b8d-7cf3-2e56-e133-473fd41ee88e" [ 989.976396] env[63345]: _type = "Task" [ 989.976396] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.984895] env[63345]: DEBUG oslo_vmware.api [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52733b8d-7cf3-2e56-e133-473fd41ee88e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.171408] env[63345]: DEBUG nova.network.neutron [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 732ac30c-15c1-4c57-bb70-ea3ed51f646b] Updating instance_info_cache with network_info: [{"id": "4b958d52-f058-41bc-a29e-1c8a2749ac18", "address": "fa:16:3e:c1:c5:58", "network": {"id": "d7581fd9-99cb-4847-b9da-a659a40e1d52", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1100696493-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c535ae9067ab4e8a87e95c68af4624fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f77ff7a1-209c-4f3f-b2a0-fd817741e739", "external-id": "nsx-vlan-transportzone-935", "segmentation_id": 935, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b958d52-f0", "ovs_interfaceid": "4b958d52-f058-41bc-a29e-1c8a2749ac18", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 990.173311] env[63345]: DEBUG oslo_concurrency.lockutils [None req-573572fd-d608-4c7c-a5a7-50f44cb4cd6a tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.830s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 990.200186] env[63345]: INFO nova.scheduler.client.report [None req-573572fd-d608-4c7c-a5a7-50f44cb4cd6a tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Deleted allocations for instance 4868a0a0-ca35-44b0-a90c-124aa366af76 [ 990.296099] env[63345]: DEBUG oslo_vmware.api [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017630, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.341512] env[63345]: DEBUG oslo_vmware.api [None req-abe292e0-35fe-4a5b-b16a-a911f1062318 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Task: {'id': task-1017626, 'name': CloneVM_Task} progress is 94%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.354408] env[63345]: DEBUG oslo_vmware.api [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017631, 'name': ReconfigVM_Task, 'duration_secs': 0.482995} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.354408] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: a8321259-b3a6-4e87-b13a-b964cf0dd766] Reconfigured VM instance instance-00000061 to attach disk [datastore2] a8321259-b3a6-4e87-b13a-b964cf0dd766/a8321259-b3a6-4e87-b13a-b964cf0dd766.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 990.354639] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-64321453-ad31-4d2e-baae-2170dd142f74 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.361659] env[63345]: DEBUG oslo_vmware.api [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Waiting for the task: (returnval){ [ 990.361659] env[63345]: value = "task-1017632" [ 990.361659] env[63345]: _type = "Task" [ 990.361659] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.366752] env[63345]: DEBUG nova.compute.manager [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Start spawning the instance on the hypervisor. {{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 990.375642] env[63345]: DEBUG oslo_vmware.api [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017632, 'name': Rename_Task} progress is 6%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.386833] env[63345]: DEBUG nova.network.neutron [req-777cf3f6-2d8e-44f6-b466-134b863b265a req-8f485f86-7f68-47e3-87d4-be8f5703ea26 service nova] [instance: 3d1e47c5-7e8c-417c-8c7c-009db666d391] Updated VIF entry in instance network info cache for port 4dd934da-ef10-4472-8c9e-60266db841a4. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 990.387347] env[63345]: DEBUG nova.network.neutron [req-777cf3f6-2d8e-44f6-b466-134b863b265a req-8f485f86-7f68-47e3-87d4-be8f5703ea26 service nova] [instance: 3d1e47c5-7e8c-417c-8c7c-009db666d391] Updating instance_info_cache with network_info: [{"id": "4dd934da-ef10-4472-8c9e-60266db841a4", "address": "fa:16:3e:07:03:7c", "network": {"id": "8fdab47e-3475-4058-955d-79800fbcaf44", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-527970309-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "411cc504a8224ba581c36e04950875e9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8039f411-8c97-48fe-a5a9-9f5a42e4e7c6", "external-id": "nsx-vlan-transportzone-12", "segmentation_id": 12, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4dd934da-ef", "ovs_interfaceid": "4dd934da-ef10-4472-8c9e-60266db841a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 990.396033] env[63345]: DEBUG nova.virt.hardware [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 990.396283] env[63345]: DEBUG nova.virt.hardware [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 990.396461] env[63345]: DEBUG nova.virt.hardware [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 990.396661] env[63345]: DEBUG nova.virt.hardware [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Flavor pref 0:0:0 
{{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 990.396827] env[63345]: DEBUG nova.virt.hardware [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 990.396991] env[63345]: DEBUG nova.virt.hardware [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 990.397494] env[63345]: DEBUG nova.virt.hardware [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 990.397572] env[63345]: DEBUG nova.virt.hardware [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 990.397748] env[63345]: DEBUG nova.virt.hardware [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 990.398034] env[63345]: DEBUG nova.virt.hardware [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 990.398276] env[63345]: DEBUG nova.virt.hardware [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 990.400008] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3502779f-887b-47e7-96f2-ea50cc149472 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.410448] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a03bdf4-cbd7-4dbb-b5ff-a6003f4cbab7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.488461] env[63345]: DEBUG oslo_vmware.api [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52733b8d-7cf3-2e56-e133-473fd41ee88e, 'name': SearchDatastore_Task, 'duration_secs': 0.018559} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.490114] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 990.490114] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] [instance: 3d1e47c5-7e8c-417c-8c7c-009db666d391] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 990.490114] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 990.490114] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 990.490114] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 990.490413] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a6f76961-881c-4fee-84a8-af7efb2e057e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.499038] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 990.499263] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 990.499952] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f1b71129-1639-48e3-8465-fed00727e07a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.505490] env[63345]: DEBUG oslo_vmware.api [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Waiting for the task: (returnval){ [ 990.505490] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52a98d2c-2964-ace2-0c94-37dfdb2745a8" [ 990.505490] env[63345]: _type = "Task" [ 990.505490] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.515183] env[63345]: DEBUG oslo_vmware.api [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52a98d2c-2964-ace2-0c94-37dfdb2745a8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.677106] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Releasing lock "refresh_cache-732ac30c-15c1-4c57-bb70-ea3ed51f646b" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 990.710983] env[63345]: DEBUG oslo_concurrency.lockutils [None req-573572fd-d608-4c7c-a5a7-50f44cb4cd6a tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lock "4868a0a0-ca35-44b0-a90c-124aa366af76" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.611s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 990.793679] env[63345]: DEBUG oslo_vmware.api [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017630, 'name': PowerOnVM_Task, 'duration_secs': 0.707196} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.794040] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 3d644f16-7924-4545-a528-1499a702d614] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 990.794350] env[63345]: INFO nova.compute.manager [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 3d644f16-7924-4545-a528-1499a702d614] Took 9.47 seconds to spawn the instance on the hypervisor. 
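The entries above show the task-polling pattern used throughout this log: a vCenter task such as PowerOnVM_Task or SearchDatastore_Task is submitted, wait_for_task waits on it, and _poll_task logs "progress is N%" until the task reports "completed successfully" with a duration. A minimal stdlib-only sketch of that loop is below; get_task_info, the dict shape it returns, and the 0.5 s interval are illustrative assumptions, not the oslo.vmware implementation.

    import time

    POLL_INTERVAL = 0.5  # assumed interval; the real poller makes this configurable

    def wait_for_task(get_task_info, task_id, poll_interval=POLL_INTERVAL):
        """Poll a task until it finishes, mirroring the progress lines in this log.

        get_task_info is assumed to be a callable returning a dict like
        {'state': 'running'|'success'|'error', 'progress': int, 'error': str or None}.
        """
        while True:
            info = get_task_info(task_id)
            if info['state'] == 'success':
                # corresponds to the "... completed successfully" entries
                print("Task %s completed successfully" % task_id)
                return info
            if info['state'] == 'error':
                raise RuntimeError("Task %s failed: %s" % (task_id, info.get('error')))
            # corresponds to the "Task: {...} progress is N%" DEBUG entries
            print("Task %s progress is %d%%" % (task_id, info.get('progress', 0)))
            time.sleep(poll_interval)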
[ 990.794563] env[63345]: DEBUG nova.compute.manager [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 3d644f16-7924-4545-a528-1499a702d614] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 990.795851] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-441f6026-8ee5-48aa-b3ae-49f22b037fdc {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.838510] env[63345]: DEBUG oslo_vmware.api [None req-abe292e0-35fe-4a5b-b16a-a911f1062318 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Task: {'id': task-1017626, 'name': CloneVM_Task, 'duration_secs': 1.406509} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.839455] env[63345]: INFO nova.virt.vmwareapi.vmops [None req-abe292e0-35fe-4a5b-b16a-a911f1062318 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] Created linked-clone VM from snapshot [ 990.840292] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67f1aa7a-92dd-4e30-8b4d-072e37c1395d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.855692] env[63345]: DEBUG nova.virt.vmwareapi.images [None req-abe292e0-35fe-4a5b-b16a-a911f1062318 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] Uploading image 8d5449ef-36c9-46f7-8cb0-2b5ee0ade1fd {{(pid=63345) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 990.871168] env[63345]: DEBUG oslo_vmware.api [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017632, 'name': Rename_Task, 'duration_secs': 0.221415} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.871683] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: a8321259-b3a6-4e87-b13a-b964cf0dd766] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 990.871940] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-440b630a-f53f-4b4c-9b49-24850d4044be {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.880602] env[63345]: DEBUG oslo_vmware.api [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Waiting for the task: (returnval){ [ 990.880602] env[63345]: value = "task-1017633" [ 990.880602] env[63345]: _type = "Task" [ 990.880602] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.886565] env[63345]: DEBUG oslo_vmware.rw_handles [None req-abe292e0-35fe-4a5b-b16a-a911f1062318 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 990.886565] env[63345]: value = "vm-226135" [ 990.886565] env[63345]: _type = "VirtualMachine" [ 990.886565] env[63345]: }. {{(pid=63345) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 990.886827] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-13ee12ce-c387-44dd-a161-6ae880268bfb {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.888989] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Acquiring lock "refresh_cache-869f8110-6490-4a47-955a-0ce085f826af" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 990.890021] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Acquired lock "refresh_cache-869f8110-6490-4a47-955a-0ce085f826af" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 990.890021] env[63345]: DEBUG nova.network.neutron [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 869f8110-6490-4a47-955a-0ce085f826af] Forcefully refreshing network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2004}} [ 990.893959] env[63345]: DEBUG oslo_concurrency.lockutils [req-777cf3f6-2d8e-44f6-b466-134b863b265a req-8f485f86-7f68-47e3-87d4-be8f5703ea26 service nova] Releasing lock "refresh_cache-3d1e47c5-7e8c-417c-8c7c-009db666d391" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 990.894656] env[63345]: DEBUG oslo_vmware.api [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017633, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.899414] env[63345]: DEBUG oslo_vmware.rw_handles [None req-abe292e0-35fe-4a5b-b16a-a911f1062318 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Lease: (returnval){ [ 990.899414] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52f6abf4-6190-ad27-3029-9a1efeff3490" [ 990.899414] env[63345]: _type = "HttpNfcLease" [ 990.899414] env[63345]: } obtained for exporting VM: (result){ [ 990.899414] env[63345]: value = "vm-226135" [ 990.899414] env[63345]: _type = "VirtualMachine" [ 990.899414] env[63345]: }. 
{{(pid=63345) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 990.899719] env[63345]: DEBUG oslo_vmware.api [None req-abe292e0-35fe-4a5b-b16a-a911f1062318 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Waiting for the lease: (returnval){ [ 990.899719] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52f6abf4-6190-ad27-3029-9a1efeff3490" [ 990.899719] env[63345]: _type = "HttpNfcLease" [ 990.899719] env[63345]: } to be ready. {{(pid=63345) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 990.909317] env[63345]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 990.909317] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52f6abf4-6190-ad27-3029-9a1efeff3490" [ 990.909317] env[63345]: _type = "HttpNfcLease" [ 990.909317] env[63345]: } is initializing. {{(pid=63345) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 991.016272] env[63345]: DEBUG oslo_vmware.api [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52a98d2c-2964-ace2-0c94-37dfdb2745a8, 'name': SearchDatastore_Task, 'duration_secs': 0.012846} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.017121] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ce429578-4f2b-464b-9daa-6534e4bf72ca {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.029516] env[63345]: DEBUG nova.network.neutron [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Successfully updated port: d6e5e759-86e1-4f76-9b65-19b2691780df {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 991.032924] env[63345]: DEBUG oslo_vmware.api [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Waiting for the task: (returnval){ [ 991.032924] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52892a91-38b9-6071-b7ce-71f241c73ec2" [ 991.032924] env[63345]: _type = "Task" [ 991.032924] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.041895] env[63345]: DEBUG oslo_vmware.api [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52892a91-38b9-6071-b7ce-71f241c73ec2, 'name': SearchDatastore_Task, 'duration_secs': 0.015856} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.042209] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 991.042481] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 3d1e47c5-7e8c-417c-8c7c-009db666d391/3d1e47c5-7e8c-417c-8c7c-009db666d391.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 991.042747] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-77f0fff1-882e-493c-8b50-13c0bfda8568 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.049569] env[63345]: DEBUG oslo_vmware.api [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Waiting for the task: (returnval){ [ 991.049569] env[63345]: value = "task-1017635" [ 991.049569] env[63345]: _type = "Task" [ 991.049569] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.059920] env[63345]: DEBUG oslo_vmware.api [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Task: {'id': task-1017635, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.137619] env[63345]: DEBUG nova.compute.manager [req-1910dc3b-c678-4046-9f25-320659715c11 req-be80fa27-b2fe-4361-9a40-6406a55a8519 service nova] [instance: 732ac30c-15c1-4c57-bb70-ea3ed51f646b] Received event network-vif-unplugged-4b958d52-f058-41bc-a29e-1c8a2749ac18 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 991.137853] env[63345]: DEBUG oslo_concurrency.lockutils [req-1910dc3b-c678-4046-9f25-320659715c11 req-be80fa27-b2fe-4361-9a40-6406a55a8519 service nova] Acquiring lock "732ac30c-15c1-4c57-bb70-ea3ed51f646b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 991.138084] env[63345]: DEBUG oslo_concurrency.lockutils [req-1910dc3b-c678-4046-9f25-320659715c11 req-be80fa27-b2fe-4361-9a40-6406a55a8519 service nova] Lock "732ac30c-15c1-4c57-bb70-ea3ed51f646b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 991.138263] env[63345]: DEBUG oslo_concurrency.lockutils [req-1910dc3b-c678-4046-9f25-320659715c11 req-be80fa27-b2fe-4361-9a40-6406a55a8519 service nova] Lock "732ac30c-15c1-4c57-bb70-ea3ed51f646b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 991.138436] env[63345]: DEBUG nova.compute.manager [req-1910dc3b-c678-4046-9f25-320659715c11 req-be80fa27-b2fe-4361-9a40-6406a55a8519 service nova] [instance: 732ac30c-15c1-4c57-bb70-ea3ed51f646b] No waiting events found dispatching network-vif-unplugged-4b958d52-f058-41bc-a29e-1c8a2749ac18 {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 991.138613] env[63345]: WARNING nova.compute.manager [req-1910dc3b-c678-4046-9f25-320659715c11 req-be80fa27-b2fe-4361-9a40-6406a55a8519 service nova] [instance: 732ac30c-15c1-4c57-bb70-ea3ed51f646b] Received unexpected event network-vif-unplugged-4b958d52-f058-41bc-a29e-1c8a2749ac18 for instance with vm_state shelved and task_state shelving_offloading. 
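The "Acquiring lock ... by ...", "Lock ... acquired ... :: waited", and "Lock ... released ... :: held" entries are the standard oslo.concurrency lockutils messages; the timing figures come from the library's inner wrapper, not from the caller. A hedged sketch of the calling pattern is below; the lock names mirror the ones in the log (the per-instance "<uuid>-events" lock and "compute_resources"), while the function bodies are placeholders for illustration only.

    from oslo_concurrency import lockutils

    # Context-manager form, as used for the per-instance "<uuid>-events" lock above.
    def pop_instance_event(instance_uuid, events_by_instance):
        with lockutils.lock('%s-events' % instance_uuid):
            # critical section: mutate the per-instance event map
            return events_by_instance.pop(instance_uuid, None)

    # Decorator form, as used for the resource tracker's "compute_resources" lock.
    @lockutils.synchronized('compute_resources')
    def update_usage(tracker, instance):
        # placeholder body; the real method lives in Nova's ResourceTracker
        tracker.update(instance)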
[ 991.187660] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 732ac30c-15c1-4c57-bb70-ea3ed51f646b] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 991.188593] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7cc05e7-6fdb-4478-9f8c-0c78b9b217db {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.196093] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 732ac30c-15c1-4c57-bb70-ea3ed51f646b] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 991.196369] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-50f2f988-371a-41c6-8f74-c3311fdc0afa {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.270020] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 732ac30c-15c1-4c57-bb70-ea3ed51f646b] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 991.270020] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 732ac30c-15c1-4c57-bb70-ea3ed51f646b] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 991.270020] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Deleting the datastore file [datastore2] 732ac30c-15c1-4c57-bb70-ea3ed51f646b {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 991.270020] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8ff8b166-05ad-47a6-bac7-676e2344316a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.277829] env[63345]: DEBUG oslo_vmware.api [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for the task: (returnval){ [ 991.277829] env[63345]: value = "task-1017637" [ 991.277829] env[63345]: _type = "Task" [ 991.277829] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.286363] env[63345]: DEBUG oslo_vmware.api [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017637, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.318533] env[63345]: INFO nova.compute.manager [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 3d644f16-7924-4545-a528-1499a702d614] Took 17.98 seconds to build instance. [ 991.394180] env[63345]: DEBUG oslo_vmware.api [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017633, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.408329] env[63345]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 991.408329] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52f6abf4-6190-ad27-3029-9a1efeff3490" [ 991.408329] env[63345]: _type = "HttpNfcLease" [ 991.408329] env[63345]: } is ready. {{(pid=63345) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 991.410306] env[63345]: DEBUG oslo_vmware.rw_handles [None req-abe292e0-35fe-4a5b-b16a-a911f1062318 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 991.410306] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52f6abf4-6190-ad27-3029-9a1efeff3490" [ 991.410306] env[63345]: _type = "HttpNfcLease" [ 991.410306] env[63345]: }. {{(pid=63345) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 991.410306] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6be5d760-ed6e-4110-8e18-01526d43949d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.417525] env[63345]: DEBUG oslo_vmware.rw_handles [None req-abe292e0-35fe-4a5b-b16a-a911f1062318 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/526f5a24-b102-1e3b-9f2e-6fd60fcf4ff3/disk-0.vmdk from lease info. {{(pid=63345) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 991.417720] env[63345]: DEBUG oslo_vmware.rw_handles [None req-abe292e0-35fe-4a5b-b16a-a911f1062318 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/526f5a24-b102-1e3b-9f2e-6fd60fcf4ff3/disk-0.vmdk for reading. 
{{(pid=63345) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 991.535081] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquiring lock "refresh_cache-7057cdfc-a6d9-4e52-b650-6a5709d5f8c2" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 991.535645] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquired lock "refresh_cache-7057cdfc-a6d9-4e52-b650-6a5709d5f8c2" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 991.536025] env[63345]: DEBUG nova.network.neutron [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 991.540728] env[63345]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-6af0f859-c99a-4093-92b4-13dddfece6c6 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.562728] env[63345]: DEBUG oslo_vmware.api [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Task: {'id': task-1017635, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.672462] env[63345]: DEBUG nova.compute.manager [req-9899b4b3-f243-425c-9773-ca2dfa92af17 req-fca0124a-d892-4677-9e92-21ac3279c711 service nova] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Received event network-vif-plugged-d6e5e759-86e1-4f76-9b65-19b2691780df {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 991.672830] env[63345]: DEBUG oslo_concurrency.lockutils [req-9899b4b3-f243-425c-9773-ca2dfa92af17 req-fca0124a-d892-4677-9e92-21ac3279c711 service nova] Acquiring lock "7057cdfc-a6d9-4e52-b650-6a5709d5f8c2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 991.672944] env[63345]: DEBUG oslo_concurrency.lockutils [req-9899b4b3-f243-425c-9773-ca2dfa92af17 req-fca0124a-d892-4677-9e92-21ac3279c711 service nova] Lock "7057cdfc-a6d9-4e52-b650-6a5709d5f8c2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 991.673220] env[63345]: DEBUG oslo_concurrency.lockutils [req-9899b4b3-f243-425c-9773-ca2dfa92af17 req-fca0124a-d892-4677-9e92-21ac3279c711 service nova] Lock "7057cdfc-a6d9-4e52-b650-6a5709d5f8c2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 991.673462] env[63345]: DEBUG nova.compute.manager [req-9899b4b3-f243-425c-9773-ca2dfa92af17 req-fca0124a-d892-4677-9e92-21ac3279c711 service nova] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] No waiting events found dispatching network-vif-plugged-d6e5e759-86e1-4f76-9b65-19b2691780df {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 991.673663] env[63345]: WARNING nova.compute.manager [req-9899b4b3-f243-425c-9773-ca2dfa92af17 req-fca0124a-d892-4677-9e92-21ac3279c711 service nova] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Received unexpected event network-vif-plugged-d6e5e759-86e1-4f76-9b65-19b2691780df for instance with vm_state building and task_state spawning. [ 991.673859] env[63345]: DEBUG nova.compute.manager [req-9899b4b3-f243-425c-9773-ca2dfa92af17 req-fca0124a-d892-4677-9e92-21ac3279c711 service nova] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Received event network-changed-d6e5e759-86e1-4f76-9b65-19b2691780df {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 991.674086] env[63345]: DEBUG nova.compute.manager [req-9899b4b3-f243-425c-9773-ca2dfa92af17 req-fca0124a-d892-4677-9e92-21ac3279c711 service nova] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Refreshing instance network info cache due to event network-changed-d6e5e759-86e1-4f76-9b65-19b2691780df. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 991.674379] env[63345]: DEBUG oslo_concurrency.lockutils [req-9899b4b3-f243-425c-9773-ca2dfa92af17 req-fca0124a-d892-4677-9e92-21ac3279c711 service nova] Acquiring lock "refresh_cache-7057cdfc-a6d9-4e52-b650-6a5709d5f8c2" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 991.789365] env[63345]: DEBUG oslo_vmware.api [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017637, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.821254] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5d442c36-8d52-4774-b861-f9aa0b059c41 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Lock "3d644f16-7924-4545-a528-1499a702d614" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.503s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 991.892029] env[63345]: DEBUG oslo_vmware.api [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017633, 'name': PowerOnVM_Task, 'duration_secs': 0.69194} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.892029] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: a8321259-b3a6-4e87-b13a-b964cf0dd766] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 991.892307] env[63345]: INFO nova.compute.manager [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: a8321259-b3a6-4e87-b13a-b964cf0dd766] Took 8.25 seconds to spawn the instance on the hypervisor. [ 991.892494] env[63345]: DEBUG nova.compute.manager [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: a8321259-b3a6-4e87-b13a-b964cf0dd766] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 991.893365] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58c8200c-54de-4184-92b0-342b9ec32d2d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.074697] env[63345]: DEBUG oslo_vmware.api [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Task: {'id': task-1017635, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.586012} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.076263] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 3d1e47c5-7e8c-417c-8c7c-009db666d391/3d1e47c5-7e8c-417c-8c7c-009db666d391.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 992.078538] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] [instance: 3d1e47c5-7e8c-417c-8c7c-009db666d391] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 992.079054] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-42d9088c-ac1c-4a09-a1b3-f214c7f602a1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.088427] env[63345]: DEBUG oslo_vmware.api [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Waiting for the task: (returnval){ [ 992.088427] env[63345]: value = "task-1017638" [ 992.088427] env[63345]: _type = "Task" [ 992.088427] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.091720] env[63345]: DEBUG nova.network.neutron [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 992.107770] env[63345]: DEBUG oslo_vmware.api [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Task: {'id': task-1017638, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.216891] env[63345]: DEBUG nova.network.neutron [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 869f8110-6490-4a47-955a-0ce085f826af] Updating instance_info_cache with network_info: [{"id": "9b0555db-b627-44ae-8812-42415d554cde", "address": "fa:16:3e:4c:4f:70", "network": {"id": "18b67684-3f06-4f15-be40-ba0b2769b248", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1680877425-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.200", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cb91ecf5d00e48dea9baf2122ac4fed7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "68add7d6-c025-46fa-84d3-9c589adb63e4", "external-id": "nsx-vlan-transportzone-961", "segmentation_id": 961, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9b0555db-b6", "ovs_interfaceid": "9b0555db-b627-44ae-8812-42415d554cde", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 992.290056] env[63345]: DEBUG oslo_vmware.api [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017637, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.548986} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.290460] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 992.290679] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 732ac30c-15c1-4c57-bb70-ea3ed51f646b] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 992.290952] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 732ac30c-15c1-4c57-bb70-ea3ed51f646b] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 992.309561] env[63345]: DEBUG nova.network.neutron [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Updating instance_info_cache with network_info: [{"id": "d6e5e759-86e1-4f76-9b65-19b2691780df", "address": "fa:16:3e:21:49:6e", "network": {"id": "b360ab0d-3deb-4632-a8d5-c1639db9e9e2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2015660260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33c28bfca4da460e8ca96dc7519204c8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f35e69ef-c2c8-4b8c-9887-33e97b242c0a", "external-id": "nsx-vlan-transportzone-969", "segmentation_id": 969, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd6e5e759-86", "ovs_interfaceid": "d6e5e759-86e1-4f76-9b65-19b2691780df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 992.318514] env[63345]: INFO nova.scheduler.client.report [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Deleted allocations for instance 732ac30c-15c1-4c57-bb70-ea3ed51f646b [ 992.414078] env[63345]: INFO nova.compute.manager [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: a8321259-b3a6-4e87-b13a-b964cf0dd766] Took 16.45 seconds to build instance. 
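The instance_info_cache updates above carry the full Neutron network_info structure for each VIF: port id, MAC address, subnets with fixed and floating IPs, the OVS devname, and the NSX segmentation id. A small helper for pulling the commonly needed fields out of one such entry is sketched below; the dict shape is taken from the entries in this log, and the helper itself is illustrative rather than part of Nova.

    def summarize_vif(vif):
        """Extract addresses and OVS details from one network_info entry.

        The expected shape matches the entries logged by
        update_instance_cache_with_nw_info: id, address, network.subnets[].ips,
        devname, and details.segmentation_id.
        """
        fixed_ips, floating_ips = [], []
        for subnet in vif.get('network', {}).get('subnets', []):
            for ip in subnet.get('ips', []):
                fixed_ips.append(ip['address'])
                floating_ips.extend(f['address'] for f in ip.get('floating_ips', []))
        return {
            'port_id': vif.get('id'),
            'mac': vif.get('address'),
            'devname': vif.get('devname'),
            'segmentation_id': vif.get('details', {}).get('segmentation_id'),
            'fixed_ips': fixed_ips,
            'floating_ips': floating_ips,
        }

    # With the cache update for instance 869f8110-... above, this would yield:
    # {'port_id': '9b0555db-627-...', 'mac': 'fa:16:3e:4c:4f:70',
    #  'devname': 'tap9b0555db-b6', 'segmentation_id': 961,
    #  'fixed_ips': ['192.168.128.11'], 'floating_ips': ['10.180.180.200']}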
[ 992.601416] env[63345]: DEBUG oslo_vmware.api [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Task: {'id': task-1017638, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.127374} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.601717] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] [instance: 3d1e47c5-7e8c-417c-8c7c-009db666d391] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 992.602649] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8322f426-4a36-4ec0-b9e4-f1be0cfd569b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.624439] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] [instance: 3d1e47c5-7e8c-417c-8c7c-009db666d391] Reconfiguring VM instance instance-00000062 to attach disk [datastore2] 3d1e47c5-7e8c-417c-8c7c-009db666d391/3d1e47c5-7e8c-417c-8c7c-009db666d391.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 992.625256] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eb50faae-f912-4e09-afba-da5dd1fd487f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.645832] env[63345]: DEBUG oslo_vmware.api [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Waiting for the task: (returnval){ [ 992.645832] env[63345]: value = "task-1017639" [ 992.645832] env[63345]: _type = "Task" [ 992.645832] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.655370] env[63345]: DEBUG oslo_vmware.api [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Task: {'id': task-1017639, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.720183] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Releasing lock "refresh_cache-869f8110-6490-4a47-955a-0ce085f826af" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 992.720833] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 869f8110-6490-4a47-955a-0ce085f826af] Updated the network info_cache for instance {{(pid=63345) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10328}} [ 992.722073] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 992.722165] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 992.722397] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 992.722657] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 992.723113] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 992.723113] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 992.723213] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63345) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10876}} [ 992.723490] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager.update_available_resource {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 992.762721] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquiring lock "0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 992.763251] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lock "0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 992.817932] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Releasing lock "refresh_cache-7057cdfc-a6d9-4e52-b650-6a5709d5f8c2" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 992.818462] env[63345]: DEBUG nova.compute.manager [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Instance network_info: |[{"id": "d6e5e759-86e1-4f76-9b65-19b2691780df", "address": "fa:16:3e:21:49:6e", "network": {"id": "b360ab0d-3deb-4632-a8d5-c1639db9e9e2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2015660260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33c28bfca4da460e8ca96dc7519204c8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f35e69ef-c2c8-4b8c-9887-33e97b242c0a", "external-id": "nsx-vlan-transportzone-969", "segmentation_id": 969, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd6e5e759-86", "ovs_interfaceid": "d6e5e759-86e1-4f76-9b65-19b2691780df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 992.818904] env[63345]: DEBUG oslo_concurrency.lockutils [req-9899b4b3-f243-425c-9773-ca2dfa92af17 req-fca0124a-d892-4677-9e92-21ac3279c711 service nova] Acquired lock "refresh_cache-7057cdfc-a6d9-4e52-b650-6a5709d5f8c2" {{(pid=63345) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 992.819146] env[63345]: DEBUG nova.network.neutron [req-9899b4b3-f243-425c-9773-ca2dfa92af17 req-fca0124a-d892-4677-9e92-21ac3279c711 service nova] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Refreshing network info cache for port d6e5e759-86e1-4f76-9b65-19b2691780df {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 992.820370] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:21:49:6e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f35e69ef-c2c8-4b8c-9887-33e97b242c0a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd6e5e759-86e1-4f76-9b65-19b2691780df', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 992.828457] env[63345]: DEBUG oslo.service.loopingcall [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 992.833455] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 992.833455] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 992.833455] env[63345]: DEBUG nova.objects.instance [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lazy-loading 'resources' on Instance uuid 732ac30c-15c1-4c57-bb70-ea3ed51f646b {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 992.834556] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 992.835548] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ed59c529-7322-4e08-8aab-f5870c21743a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.857807] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 992.857807] env[63345]: value = "task-1017640" [ 992.857807] env[63345]: _type = "Task" [ 992.857807] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.867461] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017640, 'name': CreateVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.915519] env[63345]: DEBUG oslo_concurrency.lockutils [None req-31174041-598e-483a-bc7f-63a8bbdfac5c tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Lock "a8321259-b3a6-4e87-b13a-b964cf0dd766" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.961s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 993.158991] env[63345]: DEBUG oslo_vmware.api [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Task: {'id': task-1017639, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.192784] env[63345]: DEBUG oslo_concurrency.lockutils [None req-acfbc350-d00b-40a0-afc1-114dc56b00d3 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquiring lock "3d644f16-7924-4545-a528-1499a702d614" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 993.193148] env[63345]: DEBUG oslo_concurrency.lockutils [None req-acfbc350-d00b-40a0-afc1-114dc56b00d3 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Lock "3d644f16-7924-4545-a528-1499a702d614" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 993.193419] env[63345]: DEBUG oslo_concurrency.lockutils [None req-acfbc350-d00b-40a0-afc1-114dc56b00d3 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquiring lock "3d644f16-7924-4545-a528-1499a702d614-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 993.193668] env[63345]: DEBUG oslo_concurrency.lockutils [None req-acfbc350-d00b-40a0-afc1-114dc56b00d3 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Lock "3d644f16-7924-4545-a528-1499a702d614-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 993.193888] env[63345]: DEBUG oslo_concurrency.lockutils [None req-acfbc350-d00b-40a0-afc1-114dc56b00d3 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Lock "3d644f16-7924-4545-a528-1499a702d614-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 993.196449] env[63345]: INFO nova.compute.manager [None req-acfbc350-d00b-40a0-afc1-114dc56b00d3 
tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 3d644f16-7924-4545-a528-1499a702d614] Terminating instance [ 993.205545] env[63345]: DEBUG nova.network.neutron [req-9899b4b3-f243-425c-9773-ca2dfa92af17 req-fca0124a-d892-4677-9e92-21ac3279c711 service nova] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Updated VIF entry in instance network info cache for port d6e5e759-86e1-4f76-9b65-19b2691780df. {{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 993.205545] env[63345]: DEBUG nova.network.neutron [req-9899b4b3-f243-425c-9773-ca2dfa92af17 req-fca0124a-d892-4677-9e92-21ac3279c711 service nova] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Updating instance_info_cache with network_info: [{"id": "d6e5e759-86e1-4f76-9b65-19b2691780df", "address": "fa:16:3e:21:49:6e", "network": {"id": "b360ab0d-3deb-4632-a8d5-c1639db9e9e2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2015660260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33c28bfca4da460e8ca96dc7519204c8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f35e69ef-c2c8-4b8c-9887-33e97b242c0a", "external-id": "nsx-vlan-transportzone-969", "segmentation_id": 969, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd6e5e759-86", "ovs_interfaceid": "d6e5e759-86e1-4f76-9b65-19b2691780df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 993.226893] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 993.267638] env[63345]: DEBUG nova.compute.manager [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 993.331292] env[63345]: DEBUG nova.compute.manager [req-553c0fdb-9e88-4497-b679-fdef3ac000b4 req-8e0acfc4-4040-450f-80cd-102d2c133d12 service nova] [instance: 732ac30c-15c1-4c57-bb70-ea3ed51f646b] Received event network-changed-4b958d52-f058-41bc-a29e-1c8a2749ac18 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 993.331403] env[63345]: DEBUG nova.compute.manager [req-553c0fdb-9e88-4497-b679-fdef3ac000b4 req-8e0acfc4-4040-450f-80cd-102d2c133d12 service nova] [instance: 732ac30c-15c1-4c57-bb70-ea3ed51f646b] Refreshing instance network info cache due to event network-changed-4b958d52-f058-41bc-a29e-1c8a2749ac18. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 993.331625] env[63345]: DEBUG oslo_concurrency.lockutils [req-553c0fdb-9e88-4497-b679-fdef3ac000b4 req-8e0acfc4-4040-450f-80cd-102d2c133d12 service nova] Acquiring lock "refresh_cache-732ac30c-15c1-4c57-bb70-ea3ed51f646b" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 993.331802] env[63345]: DEBUG oslo_concurrency.lockutils [req-553c0fdb-9e88-4497-b679-fdef3ac000b4 req-8e0acfc4-4040-450f-80cd-102d2c133d12 service nova] Acquired lock "refresh_cache-732ac30c-15c1-4c57-bb70-ea3ed51f646b" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 993.331976] env[63345]: DEBUG nova.network.neutron [req-553c0fdb-9e88-4497-b679-fdef3ac000b4 req-8e0acfc4-4040-450f-80cd-102d2c133d12 service nova] [instance: 732ac30c-15c1-4c57-bb70-ea3ed51f646b] Refreshing network info cache for port 4b958d52-f058-41bc-a29e-1c8a2749ac18 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 993.336726] env[63345]: DEBUG nova.objects.instance [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lazy-loading 'numa_topology' on Instance uuid 732ac30c-15c1-4c57-bb70-ea3ed51f646b {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 993.369595] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017640, 'name': CreateVM_Task, 'duration_secs': 0.439749} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.370434] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 993.370586] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 993.370868] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 993.371400] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 993.371668] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-090178e2-0102-499e-8810-93a731f6df90 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.377349] env[63345]: DEBUG oslo_vmware.api [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb 
tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Waiting for the task: (returnval){ [ 993.377349] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52474a8e-bfa4-318c-ed0f-3cafebc31fcb" [ 993.377349] env[63345]: _type = "Task" [ 993.377349] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.388179] env[63345]: DEBUG oslo_vmware.api [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52474a8e-bfa4-318c-ed0f-3cafebc31fcb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.659086] env[63345]: DEBUG oslo_vmware.api [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Task: {'id': task-1017639, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.701409] env[63345]: DEBUG nova.compute.manager [None req-acfbc350-d00b-40a0-afc1-114dc56b00d3 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 3d644f16-7924-4545-a528-1499a702d614] Start destroying the instance on the hypervisor. {{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 993.701724] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-acfbc350-d00b-40a0-afc1-114dc56b00d3 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 3d644f16-7924-4545-a528-1499a702d614] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 993.702903] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53337c7c-281c-4327-ad44-0f7c535c9d94 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.711172] env[63345]: DEBUG oslo_concurrency.lockutils [req-9899b4b3-f243-425c-9773-ca2dfa92af17 req-fca0124a-d892-4677-9e92-21ac3279c711 service nova] Releasing lock "refresh_cache-7057cdfc-a6d9-4e52-b650-6a5709d5f8c2" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 993.711586] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-acfbc350-d00b-40a0-afc1-114dc56b00d3 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 3d644f16-7924-4545-a528-1499a702d614] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 993.711836] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8112761f-9144-450e-be9d-2242808b6d84 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.718431] env[63345]: DEBUG oslo_vmware.api [None req-acfbc350-d00b-40a0-afc1-114dc56b00d3 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Waiting for the task: (returnval){ [ 993.718431] env[63345]: value = "task-1017641" [ 993.718431] env[63345]: _type = "Task" [ 993.718431] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.726469] env[63345]: DEBUG oslo_vmware.api [None req-acfbc350-d00b-40a0-afc1-114dc56b00d3 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017641, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.791632] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 993.840141] env[63345]: DEBUG nova.objects.base [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Object Instance<732ac30c-15c1-4c57-bb70-ea3ed51f646b> lazy-loaded attributes: resources,numa_topology {{(pid=63345) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 993.887690] env[63345]: DEBUG oslo_vmware.api [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52474a8e-bfa4-318c-ed0f-3cafebc31fcb, 'name': SearchDatastore_Task, 'duration_secs': 0.012613} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.890491] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 993.890823] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 993.891155] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 993.891398] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 993.891596] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb 
tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 993.892115] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d22ffa57-1eac-4324-a694-9aeee16a90eb {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.902649] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 993.902977] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 993.907557] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3c2f2321-8555-423d-b753-baf47d62fce0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.915120] env[63345]: DEBUG oslo_vmware.api [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Waiting for the task: (returnval){ [ 993.915120] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]529c0cd1-a353-9797-db38-b1c09ae1b23e" [ 993.915120] env[63345]: _type = "Task" [ 993.915120] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.934658] env[63345]: DEBUG oslo_vmware.api [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]529c0cd1-a353-9797-db38-b1c09ae1b23e, 'name': SearchDatastore_Task, 'duration_secs': 0.015047} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.935837] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7d6591df-d3ce-4cea-af57-b4daa134659f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.948893] env[63345]: DEBUG oslo_vmware.api [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Waiting for the task: (returnval){ [ 993.948893] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52518776-1a1d-3f8a-a37a-bb13b450754d" [ 993.948893] env[63345]: _type = "Task" [ 993.948893] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.959484] env[63345]: DEBUG oslo_vmware.api [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52518776-1a1d-3f8a-a37a-bb13b450754d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.050055] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36e430fc-1dbe-4343-b462-d1d507eabbae {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.057935] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8828afde-9cb5-4723-98e2-f93027fb896b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.093352] env[63345]: DEBUG nova.network.neutron [req-553c0fdb-9e88-4497-b679-fdef3ac000b4 req-8e0acfc4-4040-450f-80cd-102d2c133d12 service nova] [instance: 732ac30c-15c1-4c57-bb70-ea3ed51f646b] Updated VIF entry in instance network info cache for port 4b958d52-f058-41bc-a29e-1c8a2749ac18. {{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 994.094021] env[63345]: DEBUG nova.network.neutron [req-553c0fdb-9e88-4497-b679-fdef3ac000b4 req-8e0acfc4-4040-450f-80cd-102d2c133d12 service nova] [instance: 732ac30c-15c1-4c57-bb70-ea3ed51f646b] Updating instance_info_cache with network_info: [{"id": "4b958d52-f058-41bc-a29e-1c8a2749ac18", "address": "fa:16:3e:c1:c5:58", "network": {"id": "d7581fd9-99cb-4847-b9da-a659a40e1d52", "bridge": null, "label": "tempest-DeleteServersTestJSON-1100696493-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c535ae9067ab4e8a87e95c68af4624fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap4b958d52-f0", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 994.096831] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42ae5cca-4456-4997-873b-a5de0a292d4f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.105273] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e4887d5-6bba-45e7-bea0-00b60e460d81 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.120893] env[63345]: DEBUG nova.compute.provider_tree [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Inventory has not changed in ProviderTree for provider: 
fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 994.158114] env[63345]: DEBUG oslo_vmware.api [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Task: {'id': task-1017639, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.229145] env[63345]: DEBUG oslo_vmware.api [None req-acfbc350-d00b-40a0-afc1-114dc56b00d3 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017641, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.460664] env[63345]: DEBUG oslo_vmware.api [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52518776-1a1d-3f8a-a37a-bb13b450754d, 'name': SearchDatastore_Task, 'duration_secs': 0.01437} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.460987] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 994.461300] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2/7057cdfc-a6d9-4e52-b650-6a5709d5f8c2.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 994.461595] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-19b00059-bf19-4a71-8562-2d2202eb8f5c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.469019] env[63345]: DEBUG oslo_vmware.api [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Waiting for the task: (returnval){ [ 994.469019] env[63345]: value = "task-1017642" [ 994.469019] env[63345]: _type = "Task" [ 994.469019] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.477815] env[63345]: DEBUG oslo_vmware.api [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017642, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.600933] env[63345]: DEBUG oslo_concurrency.lockutils [req-553c0fdb-9e88-4497-b679-fdef3ac000b4 req-8e0acfc4-4040-450f-80cd-102d2c133d12 service nova] Releasing lock "refresh_cache-732ac30c-15c1-4c57-bb70-ea3ed51f646b" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 994.624615] env[63345]: DEBUG nova.scheduler.client.report [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 994.659612] env[63345]: DEBUG oslo_vmware.api [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Task: {'id': task-1017639, 'name': ReconfigVM_Task, 'duration_secs': 1.550261} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.659911] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] [instance: 3d1e47c5-7e8c-417c-8c7c-009db666d391] Reconfigured VM instance instance-00000062 to attach disk [datastore2] 3d1e47c5-7e8c-417c-8c7c-009db666d391/3d1e47c5-7e8c-417c-8c7c-009db666d391.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 994.660592] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-74571562-9103-4e06-8cf9-c638cf9b1d0b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.667397] env[63345]: DEBUG oslo_vmware.api [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Waiting for the task: (returnval){ [ 994.667397] env[63345]: value = "task-1017643" [ 994.667397] env[63345]: _type = "Task" [ 994.667397] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.678344] env[63345]: DEBUG oslo_vmware.api [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Task: {'id': task-1017643, 'name': Rename_Task} progress is 5%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.730771] env[63345]: DEBUG oslo_vmware.api [None req-acfbc350-d00b-40a0-afc1-114dc56b00d3 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017641, 'name': PowerOffVM_Task, 'duration_secs': 0.623083} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.731107] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-acfbc350-d00b-40a0-afc1-114dc56b00d3 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 3d644f16-7924-4545-a528-1499a702d614] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 994.731293] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-acfbc350-d00b-40a0-afc1-114dc56b00d3 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 3d644f16-7924-4545-a528-1499a702d614] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 994.731579] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b26cf761-c83a-4a84-bbd9-dec8ae1a7b91 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.821508] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-acfbc350-d00b-40a0-afc1-114dc56b00d3 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 3d644f16-7924-4545-a528-1499a702d614] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 994.821770] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-acfbc350-d00b-40a0-afc1-114dc56b00d3 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 3d644f16-7924-4545-a528-1499a702d614] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 994.821957] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-acfbc350-d00b-40a0-afc1-114dc56b00d3 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Deleting the datastore file [datastore2] 3d644f16-7924-4545-a528-1499a702d614 {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 994.822460] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d2bb48cd-f72e-4fbb-b876-4765f23ed437 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.830126] env[63345]: DEBUG oslo_vmware.api [None req-acfbc350-d00b-40a0-afc1-114dc56b00d3 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Waiting for the task: (returnval){ [ 994.830126] env[63345]: value = "task-1017645" [ 994.830126] env[63345]: _type = "Task" [ 994.830126] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.838705] env[63345]: DEBUG oslo_vmware.api [None req-acfbc350-d00b-40a0-afc1-114dc56b00d3 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017645, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.979679] env[63345]: DEBUG oslo_vmware.api [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017642, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.504079} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.980116] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2/7057cdfc-a6d9-4e52-b650-6a5709d5f8c2.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 994.980355] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 994.980652] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-103b0c80-a356-45f7-9229-68b065075a4c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.987052] env[63345]: DEBUG oslo_vmware.api [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Waiting for the task: (returnval){ [ 994.987052] env[63345]: value = "task-1017646" [ 994.987052] env[63345]: _type = "Task" [ 994.987052] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.997109] env[63345]: DEBUG oslo_vmware.api [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017646, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.130504] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.297s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 995.133106] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 1.906s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 995.133299] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 995.133461] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63345) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 995.134045] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.342s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 995.135216] env[63345]: INFO nova.compute.claims [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 995.138236] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-492832f9-9f91-4ec3-b2c7-c6df6efbd57e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.147564] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80cc1b11-e976-4a99-96ca-a572889e8ab1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.163599] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7061772c-32c4-4a94-b003-50401e2b9fb0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.173425] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31540419-5ce0-4108-b31f-67054cb6068d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.182779] env[63345]: DEBUG oslo_vmware.api [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 
tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Task: {'id': task-1017643, 'name': Rename_Task, 'duration_secs': 0.384074} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.207433] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] [instance: 3d1e47c5-7e8c-417c-8c7c-009db666d391] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 995.208156] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179819MB free_disk=186GB free_vcpus=48 pci_devices=None {{(pid=63345) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 995.208305] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 995.208670] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-09386db6-9cfb-4afd-9e34-17ae3dcfbedd {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.215874] env[63345]: DEBUG oslo_vmware.api [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Waiting for the task: (returnval){ [ 995.215874] env[63345]: value = "task-1017647" [ 995.215874] env[63345]: _type = "Task" [ 995.215874] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.223685] env[63345]: DEBUG oslo_vmware.api [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Task: {'id': task-1017647, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.339590] env[63345]: DEBUG oslo_vmware.api [None req-acfbc350-d00b-40a0-afc1-114dc56b00d3 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017645, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.317374} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.340053] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-acfbc350-d00b-40a0-afc1-114dc56b00d3 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 995.340118] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-acfbc350-d00b-40a0-afc1-114dc56b00d3 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 3d644f16-7924-4545-a528-1499a702d614] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 995.340326] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-acfbc350-d00b-40a0-afc1-114dc56b00d3 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 3d644f16-7924-4545-a528-1499a702d614] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 995.340521] env[63345]: INFO nova.compute.manager [None req-acfbc350-d00b-40a0-afc1-114dc56b00d3 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 3d644f16-7924-4545-a528-1499a702d614] Took 1.64 seconds to destroy the instance on the hypervisor. [ 995.340770] env[63345]: DEBUG oslo.service.loopingcall [None req-acfbc350-d00b-40a0-afc1-114dc56b00d3 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 995.340972] env[63345]: DEBUG nova.compute.manager [-] [instance: 3d644f16-7924-4545-a528-1499a702d614] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 995.341080] env[63345]: DEBUG nova.network.neutron [-] [instance: 3d644f16-7924-4545-a528-1499a702d614] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 995.497852] env[63345]: DEBUG oslo_vmware.api [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017646, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.110655} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.498695] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 995.499386] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8336bdb-3802-47cb-a1ee-7f352138cfbe {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.523188] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Reconfiguring VM instance instance-00000063 to attach disk [datastore2] 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2/7057cdfc-a6d9-4e52-b650-6a5709d5f8c2.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 995.523552] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6b09f596-d09f-4fa4-9ae2-04e340b545db {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.543658] env[63345]: DEBUG oslo_vmware.api [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Waiting for the task: (returnval){ [ 995.543658] env[63345]: value = "task-1017648" [ 995.543658] env[63345]: _type = "Task" [ 995.543658] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.553919] env[63345]: DEBUG oslo_vmware.api [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017648, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.644209] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8431e362-aa76-47e3-808e-e5699bc83bf5 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lock "732ac30c-15c1-4c57-bb70-ea3ed51f646b" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 23.467s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 995.726671] env[63345]: DEBUG oslo_vmware.api [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Task: {'id': task-1017647, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.054120] env[63345]: DEBUG oslo_vmware.api [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017648, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.059639] env[63345]: DEBUG nova.network.neutron [-] [instance: 3d644f16-7924-4545-a528-1499a702d614] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 996.226641] env[63345]: DEBUG oslo_vmware.api [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Task: {'id': task-1017647, 'name': PowerOnVM_Task, 'duration_secs': 0.561491} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.229533] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] [instance: 3d1e47c5-7e8c-417c-8c7c-009db666d391] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 996.229788] env[63345]: INFO nova.compute.manager [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] [instance: 3d1e47c5-7e8c-417c-8c7c-009db666d391] Took 9.14 seconds to spawn the instance on the hypervisor. [ 996.230085] env[63345]: DEBUG nova.compute.manager [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] [instance: 3d1e47c5-7e8c-417c-8c7c-009db666d391] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 996.231165] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bf4b8eb-aa50-4511-8068-358b77cee66f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.327631] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cad60a8d-5a4f-4fdd-b15c-b3f55a271214 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.336307] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9eea82a-7234-4bee-93c0-7cb185f567c2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.370595] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8e4baa1-6c4e-49bb-aa50-bd3a74089e02 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.378378] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-851ee5f9-4227-4bd7-b052-872e4dea4a52 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.393515] env[63345]: DEBUG nova.compute.provider_tree [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 996.554449] env[63345]: DEBUG oslo_vmware.api [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017648, 'name': ReconfigVM_Task, 'duration_secs': 0.770381} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.554758] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Reconfigured VM instance instance-00000063 to attach disk [datastore2] 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2/7057cdfc-a6d9-4e52-b650-6a5709d5f8c2.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 996.555466] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bef54b32-aaac-426a-891b-2a586c30802c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.562428] env[63345]: INFO nova.compute.manager [-] [instance: 3d644f16-7924-4545-a528-1499a702d614] Took 1.22 seconds to deallocate network for instance. [ 996.562772] env[63345]: DEBUG oslo_vmware.api [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Waiting for the task: (returnval){ [ 996.562772] env[63345]: value = "task-1017649" [ 996.562772] env[63345]: _type = "Task" [ 996.562772] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.575325] env[63345]: DEBUG oslo_vmware.api [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017649, 'name': Rename_Task} progress is 6%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.754212] env[63345]: INFO nova.compute.manager [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] [instance: 3d1e47c5-7e8c-417c-8c7c-009db666d391] Took 18.12 seconds to build instance. 
[ 996.897433] env[63345]: DEBUG nova.scheduler.client.report [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 997.073041] env[63345]: DEBUG oslo_concurrency.lockutils [None req-acfbc350-d00b-40a0-afc1-114dc56b00d3 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 997.077231] env[63345]: DEBUG oslo_vmware.api [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017649, 'name': Rename_Task, 'duration_secs': 0.21192} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.077543] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 997.077810] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0a35acff-5498-4740-bae9-91a8cd28b90d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.085193] env[63345]: DEBUG oslo_vmware.api [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Waiting for the task: (returnval){ [ 997.085193] env[63345]: value = "task-1017650" [ 997.085193] env[63345]: _type = "Task" [ 997.085193] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.099749] env[63345]: DEBUG oslo_vmware.api [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017650, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.257243] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1f445b4b-8d08-4bdb-bbfa-b22c52c56b06 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Lock "3d1e47c5-7e8c-417c-8c7c-009db666d391" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.630s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 997.405752] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.272s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 997.406278] env[63345]: DEBUG nova.compute.manager [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a] Start building networks asynchronously for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 997.408891] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 2.200s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 997.598337] env[63345]: DEBUG oslo_vmware.api [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017650, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.912937] env[63345]: DEBUG nova.compute.utils [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 997.924448] env[63345]: DEBUG nova.compute.manager [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a] Allocating IP information in the background. 
{{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 997.924759] env[63345]: DEBUG nova.network.neutron [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 997.979923] env[63345]: DEBUG nova.policy [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dd618fef89a843209784ca9e925d18eb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cb91ecf5d00e48dea9baf2122ac4fed7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 998.097926] env[63345]: DEBUG oslo_vmware.api [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017650, 'name': PowerOnVM_Task, 'duration_secs': 0.695127} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.098274] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 998.098471] env[63345]: INFO nova.compute.manager [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Took 7.73 seconds to spawn the instance on the hypervisor. 
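
The recurring "Acquiring lock ... / Lock ... acquired ... :: waited / ... released ... :: held" triples come from oslo.concurrency's lockutils wrappers around critical sections such as the "compute_resources" and per-instance "refresh_cache-*" locks. A minimal sketch of that pattern; the decorated function and its body are illustrative only:

    from oslo_concurrency import lockutils

    # Decorator form: the body runs with the named lock held; the waited/held
    # durations logged above are measured around this call.
    @lockutils.synchronized('compute_resources')
    def update_usage(tracker, instance_uuid, vcpus):
        tracker[instance_uuid] = vcpus   # illustrative body only

    # Context-manager form, as used for the per-instance refresh_cache locks.
    with lockutils.lock('refresh_cache-0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a'):
        pass  # refresh the instance's network info cache here
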
[ 998.098648] env[63345]: DEBUG nova.compute.manager [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 998.099641] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64e4d36a-0b64-4bac-b3ce-92af8c243ac6 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.260160] env[63345]: DEBUG nova.network.neutron [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a] Successfully created port: bca55223-b7b4-4623-abaf-4d4a68f5b7cc {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 998.425752] env[63345]: DEBUG nova.compute.manager [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a] Start building block device mappings for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 998.460769] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 869f8110-6490-4a47-955a-0ce085f826af actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 998.460968] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 5e20b33c-1481-4bd3-b269-29a70cc3150d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 998.461114] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 9aa651b8-317d-4153-8c33-9df0a5d16115 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 998.461358] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 726332dd-8699-49a4-a9ea-b9cbfc159855 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 998.461358] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance a0eb9dae-0d27-419f-9210-eaa445e564c8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 998.461474] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance b3f20003-f75d-4d9f-bb4a-02d2930054a8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 998.461620] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 22a11cf9-8f85-4371-98eb-25b267c9aff7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 998.461753] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance dd624e54-bd5b-4660-88a1-9d6f36560421 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 998.461893] env[63345]: WARNING nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 3d644f16-7924-4545-a528-1499a702d614 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 998.462026] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance a8321259-b3a6-4e87-b13a-b964cf0dd766 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 998.462171] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 3d1e47c5-7e8c-417c-8c7c-009db666d391 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 998.462308] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 998.462426] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 998.462640] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Total usable vcpus: 48, total allocated vcpus: 12 {{(pid=63345) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 998.462837] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2816MB phys_disk=200GB used_disk=12GB total_vcpus=48 used_vcpus=12 pci_stats=[] {{(pid=63345) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 998.621062] env[63345]: INFO nova.compute.manager [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Took 13.66 seconds to build instance. [ 998.663788] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-542fbd9f-df73-4f24-93f2-de8c5057c57b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.672766] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb2bd923-2237-4943-bbc5-64972ae1b6c2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.703290] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebebece5-07ea-4b3f-b7c4-3c7af4114c12 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.711961] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d59391c8-419c-4afb-97af-f339f71a5225 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.728071] env[63345]: DEBUG nova.compute.provider_tree [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 999.123460] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6ced897d-c6e8-4c35-bc02-93cd3847e9eb tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Lock "7057cdfc-a6d9-4e52-b650-6a5709d5f8c2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.170s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 999.231121] env[63345]: DEBUG nova.scheduler.client.report [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:955}} [ 999.441627] env[63345]: DEBUG nova.compute.manager [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a] Start spawning the instance on the hypervisor. {{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 999.549565] env[63345]: DEBUG oslo_vmware.rw_handles [None req-958f9d2f-620a-4f2b-a6c2-282bda8a46ca tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521f398a-8ab7-676e-2f5f-026d1bb4fac4/disk-0.vmdk. {{(pid=63345) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 999.551851] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54e58570-d3f2-4b79-9332-de49fdceb45c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.559251] env[63345]: DEBUG oslo_vmware.rw_handles [None req-958f9d2f-620a-4f2b-a6c2-282bda8a46ca tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521f398a-8ab7-676e-2f5f-026d1bb4fac4/disk-0.vmdk is in state: ready. {{(pid=63345) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 999.559493] env[63345]: ERROR oslo_vmware.rw_handles [None req-958f9d2f-620a-4f2b-a6c2-282bda8a46ca tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521f398a-8ab7-676e-2f5f-026d1bb4fac4/disk-0.vmdk due to incomplete transfer. 
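
The inventory dict reported above for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 maps to schedulable capacity in placement as (total - reserved) * allocation_ratio. A small worked example, using only the values already shown in this log:

    # Values copied from the "Inventory has not changed ..." entries above.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
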
[ 999.561594] env[63345]: DEBUG nova.virt.hardware [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 999.561846] env[63345]: DEBUG nova.virt.hardware [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 999.562018] env[63345]: DEBUG nova.virt.hardware [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 999.562255] env[63345]: DEBUG nova.virt.hardware [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 999.562420] env[63345]: DEBUG nova.virt.hardware [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 999.562574] env[63345]: DEBUG nova.virt.hardware [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 999.562783] env[63345]: DEBUG nova.virt.hardware [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 999.562948] env[63345]: DEBUG nova.virt.hardware [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 999.563134] env[63345]: DEBUG nova.virt.hardware [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 
tempest-ServerActionsTestOtherA-316720793-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 999.563304] env[63345]: DEBUG nova.virt.hardware [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 999.563482] env[63345]: DEBUG nova.virt.hardware [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 999.563750] env[63345]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-219c07cc-e62d-4fa6-8ad4-bf8d191afdbb {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.565893] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90d6811f-ddc2-4cd1-af03-9d17e6b29873 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.574399] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7703055e-9a64-40c6-a877-352adc1192d3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.579196] env[63345]: DEBUG oslo_vmware.rw_handles [None req-958f9d2f-620a-4f2b-a6c2-282bda8a46ca tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521f398a-8ab7-676e-2f5f-026d1bb4fac4/disk-0.vmdk. {{(pid=63345) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 999.579414] env[63345]: DEBUG nova.virt.vmwareapi.images [None req-958f9d2f-620a-4f2b-a6c2-282bda8a46ca tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Uploaded image fdfac4b6-e678-4e72-af5e-15f172b83432 to the Glance image server {{(pid=63345) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:473}} [ 999.581810] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-958f9d2f-620a-4f2b-a6c2-282bda8a46ca tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Destroying the VM {{(pid=63345) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 999.582457] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-a9fde494-2416-488f-8dd7-136a262d89bf {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.597145] env[63345]: DEBUG oslo_vmware.api [None req-958f9d2f-620a-4f2b-a6c2-282bda8a46ca tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 999.597145] env[63345]: value = "task-1017651" [ 999.597145] env[63345]: _type = "Task" [ 999.597145] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.604547] env[63345]: DEBUG oslo_vmware.api [None req-958f9d2f-620a-4f2b-a6c2-282bda8a46ca tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017651, 'name': Destroy_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.736668] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63345) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 999.736894] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.328s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 999.737212] env[63345]: DEBUG oslo_concurrency.lockutils [None req-acfbc350-d00b-40a0-afc1-114dc56b00d3 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.665s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 999.737432] env[63345]: DEBUG oslo_concurrency.lockutils [None req-acfbc350-d00b-40a0-afc1-114dc56b00d3 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 999.758554] env[63345]: INFO nova.scheduler.client.report [None req-acfbc350-d00b-40a0-afc1-114dc56b00d3 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Deleted allocations for instance 3d644f16-7924-4545-a528-1499a702d614 [ 1000.107258] env[63345]: DEBUG oslo_vmware.api [None req-958f9d2f-620a-4f2b-a6c2-282bda8a46ca tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017651, 'name': Destroy_Task} progress is 33%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.116094] env[63345]: DEBUG nova.network.neutron [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a] Successfully updated port: bca55223-b7b4-4623-abaf-4d4a68f5b7cc {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1000.225806] env[63345]: DEBUG nova.compute.manager [req-804c599d-3681-4f7a-b117-d467994fbed3 req-aa21c492-0292-49f1-974f-f0393fa36818 service nova] [instance: 3d644f16-7924-4545-a528-1499a702d614] Received event network-vif-deleted-15685195-e615-4386-8884-fbd1fd0e8221 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1000.266744] env[63345]: DEBUG oslo_concurrency.lockutils [None req-acfbc350-d00b-40a0-afc1-114dc56b00d3 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Lock "3d644f16-7924-4545-a528-1499a702d614" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.074s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1000.613022] env[63345]: DEBUG oslo_vmware.api [None req-958f9d2f-620a-4f2b-a6c2-282bda8a46ca tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017651, 'name': Destroy_Task, 'duration_secs': 0.856638} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.613022] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-958f9d2f-620a-4f2b-a6c2-282bda8a46ca tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Destroyed the VM [ 1000.613022] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-958f9d2f-620a-4f2b-a6c2-282bda8a46ca tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Deleting Snapshot of the VM instance {{(pid=63345) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1000.613022] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-5a444599-c8a6-4440-8e21-19923fe4ab32 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.623632] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquiring lock "refresh_cache-0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1000.623632] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquired lock "refresh_cache-0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1000.623632] env[63345]: DEBUG nova.network.neutron [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 
tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1000.627805] env[63345]: DEBUG oslo_vmware.api [None req-958f9d2f-620a-4f2b-a6c2-282bda8a46ca tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 1000.627805] env[63345]: value = "task-1017652" [ 1000.627805] env[63345]: _type = "Task" [ 1000.627805] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.637460] env[63345]: DEBUG oslo_vmware.api [None req-958f9d2f-620a-4f2b-a6c2-282bda8a46ca tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017652, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.139493] env[63345]: DEBUG oslo_vmware.api [None req-958f9d2f-620a-4f2b-a6c2-282bda8a46ca tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017652, 'name': RemoveSnapshot_Task} progress is 17%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.217568] env[63345]: DEBUG nova.network.neutron [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1001.226489] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ca306e44-3946-42a1-84bd-faab172327f6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Acquiring lock "a0eb9dae-0d27-419f-9210-eaa445e564c8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1001.226770] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ca306e44-3946-42a1-84bd-faab172327f6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Lock "a0eb9dae-0d27-419f-9210-eaa445e564c8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1001.226991] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ca306e44-3946-42a1-84bd-faab172327f6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Acquiring lock "a0eb9dae-0d27-419f-9210-eaa445e564c8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1001.227220] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ca306e44-3946-42a1-84bd-faab172327f6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Lock "a0eb9dae-0d27-419f-9210-eaa445e564c8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 
0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1001.227419] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ca306e44-3946-42a1-84bd-faab172327f6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Lock "a0eb9dae-0d27-419f-9210-eaa445e564c8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1001.230250] env[63345]: INFO nova.compute.manager [None req-ca306e44-3946-42a1-84bd-faab172327f6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: a0eb9dae-0d27-419f-9210-eaa445e564c8] Terminating instance [ 1001.468179] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1e7ece6b-bc63-4841-8e73-7426c4d38a0d tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Acquiring lock "9aa651b8-317d-4153-8c33-9df0a5d16115" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1001.469107] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1e7ece6b-bc63-4841-8e73-7426c4d38a0d tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Lock "9aa651b8-317d-4153-8c33-9df0a5d16115" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1001.554805] env[63345]: DEBUG nova.network.neutron [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a] Updating instance_info_cache with network_info: [{"id": "bca55223-b7b4-4623-abaf-4d4a68f5b7cc", "address": "fa:16:3e:6d:80:f9", "network": {"id": "18b67684-3f06-4f15-be40-ba0b2769b248", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1680877425-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cb91ecf5d00e48dea9baf2122ac4fed7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "68add7d6-c025-46fa-84d3-9c589adb63e4", "external-id": "nsx-vlan-transportzone-961", "segmentation_id": 961, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbca55223-b7", "ovs_interfaceid": "bca55223-b7b4-4623-abaf-4d4a68f5b7cc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1001.639661] env[63345]: DEBUG oslo_vmware.api [None req-958f9d2f-620a-4f2b-a6c2-282bda8a46ca tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: 
{'id': task-1017652, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.734197] env[63345]: DEBUG nova.compute.manager [None req-ca306e44-3946-42a1-84bd-faab172327f6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: a0eb9dae-0d27-419f-9210-eaa445e564c8] Start destroying the instance on the hypervisor. {{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 1001.734450] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ca306e44-3946-42a1-84bd-faab172327f6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: a0eb9dae-0d27-419f-9210-eaa445e564c8] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1001.735373] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e7daaab-fa1f-48d6-955e-1e46b535c258 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.743399] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca306e44-3946-42a1-84bd-faab172327f6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: a0eb9dae-0d27-419f-9210-eaa445e564c8] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1001.743629] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b93f52fa-6f5c-4e33-a8c1-aa15accfd905 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.749164] env[63345]: DEBUG oslo_vmware.api [None req-ca306e44-3946-42a1-84bd-faab172327f6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Waiting for the task: (returnval){ [ 1001.749164] env[63345]: value = "task-1017653" [ 1001.749164] env[63345]: _type = "Task" [ 1001.749164] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.756477] env[63345]: DEBUG oslo_vmware.api [None req-ca306e44-3946-42a1-84bd-faab172327f6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017653, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.972549] env[63345]: INFO nova.compute.manager [None req-1e7ece6b-bc63-4841-8e73-7426c4d38a0d tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Detaching volume 78ab0d16-eec6-45ab-badd-c9109f513975 [ 1002.018853] env[63345]: INFO nova.virt.block_device [None req-1e7ece6b-bc63-4841-8e73-7426c4d38a0d tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Attempting to driver detach volume 78ab0d16-eec6-45ab-badd-c9109f513975 from mountpoint /dev/sdb [ 1002.019130] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-1e7ece6b-bc63-4841-8e73-7426c4d38a0d tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Volume detach. Driver type: vmdk {{(pid=63345) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1002.019399] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-1e7ece6b-bc63-4841-8e73-7426c4d38a0d tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-226101', 'volume_id': '78ab0d16-eec6-45ab-badd-c9109f513975', 'name': 'volume-78ab0d16-eec6-45ab-badd-c9109f513975', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '9aa651b8-317d-4153-8c33-9df0a5d16115', 'attached_at': '', 'detached_at': '', 'volume_id': '78ab0d16-eec6-45ab-badd-c9109f513975', 'serial': '78ab0d16-eec6-45ab-badd-c9109f513975'} {{(pid=63345) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1002.021281] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56a7c625-aedf-479f-8d76-cf40f6437a07 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.050788] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-398197e6-9509-45d5-bda8-1c940eb8d54d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.061538] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Releasing lock "refresh_cache-0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1002.061538] env[63345]: DEBUG nova.compute.manager [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a] Instance network_info: |[{"id": "bca55223-b7b4-4623-abaf-4d4a68f5b7cc", "address": "fa:16:3e:6d:80:f9", "network": {"id": "18b67684-3f06-4f15-be40-ba0b2769b248", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1680877425-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", 
"version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cb91ecf5d00e48dea9baf2122ac4fed7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "68add7d6-c025-46fa-84d3-9c589adb63e4", "external-id": "nsx-vlan-transportzone-961", "segmentation_id": 961, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbca55223-b7", "ovs_interfaceid": "bca55223-b7b4-4623-abaf-4d4a68f5b7cc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 1002.063612] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6d:80:f9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '68add7d6-c025-46fa-84d3-9c589adb63e4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bca55223-b7b4-4623-abaf-4d4a68f5b7cc', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1002.072554] env[63345]: DEBUG oslo.service.loopingcall [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1002.073846] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquiring lock "34990fa5-4a89-4430-8ea7-9e73dd41f441" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1002.073846] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Lock "34990fa5-4a89-4430-8ea7-9e73dd41f441" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1002.077398] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1002.077614] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-100acc99-4c3c-41c4-83fe-e9b64cfe0482 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.093289] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fff922d-5ab0-4054-8310-60e0397706f9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.116616] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f51c7187-8d7c-457c-b69a-a8a1575c16d3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.119585] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1002.119585] env[63345]: value = "task-1017654" [ 1002.119585] env[63345]: _type = "Task" [ 1002.119585] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.134315] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-1e7ece6b-bc63-4841-8e73-7426c4d38a0d tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] The volume has not been displaced from its original location: [datastore2] volume-78ab0d16-eec6-45ab-badd-c9109f513975/volume-78ab0d16-eec6-45ab-badd-c9109f513975.vmdk. No consolidation needed. 
{{(pid=63345) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1002.139799] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-1e7ece6b-bc63-4841-8e73-7426c4d38a0d tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Reconfiguring VM instance instance-00000042 to detach disk 2001 {{(pid=63345) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1002.143873] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1bcb6c0d-d321-4156-b62f-53f45b2c3753 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.161986] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017654, 'name': CreateVM_Task} progress is 15%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.168322] env[63345]: DEBUG oslo_vmware.api [None req-958f9d2f-620a-4f2b-a6c2-282bda8a46ca tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017652, 'name': RemoveSnapshot_Task, 'duration_secs': 1.053857} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.169971] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-958f9d2f-620a-4f2b-a6c2-282bda8a46ca tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Deleted Snapshot of the VM instance {{(pid=63345) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1002.170312] env[63345]: INFO nova.compute.manager [None req-958f9d2f-620a-4f2b-a6c2-282bda8a46ca tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Took 18.44 seconds to snapshot the instance on the hypervisor. [ 1002.175312] env[63345]: DEBUG oslo_vmware.api [None req-1e7ece6b-bc63-4841-8e73-7426c4d38a0d tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Waiting for the task: (returnval){ [ 1002.175312] env[63345]: value = "task-1017655" [ 1002.175312] env[63345]: _type = "Task" [ 1002.175312] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.183404] env[63345]: DEBUG oslo_vmware.api [None req-1e7ece6b-bc63-4841-8e73-7426c4d38a0d tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017655, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.223909] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquiring lock "a7d80763-92f0-45a9-b24b-1f973bffb376" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1002.224296] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lock "a7d80763-92f0-45a9-b24b-1f973bffb376" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1002.259327] env[63345]: DEBUG oslo_vmware.api [None req-ca306e44-3946-42a1-84bd-faab172327f6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017653, 'name': PowerOffVM_Task, 'duration_secs': 0.343603} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.259728] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca306e44-3946-42a1-84bd-faab172327f6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: a0eb9dae-0d27-419f-9210-eaa445e564c8] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1002.259824] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ca306e44-3946-42a1-84bd-faab172327f6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: a0eb9dae-0d27-419f-9210-eaa445e564c8] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1002.260337] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-05f402f5-283e-4956-9c48-f7767e74e73d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.291056] env[63345]: DEBUG nova.compute.manager [req-3a75cadd-3785-48b3-9b5b-e7f5aaae46b9 req-f318d40a-44fd-482d-a6fd-e474bded1942 service nova] [instance: 0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a] Received event network-vif-plugged-bca55223-b7b4-4623-abaf-4d4a68f5b7cc {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1002.291322] env[63345]: DEBUG oslo_concurrency.lockutils [req-3a75cadd-3785-48b3-9b5b-e7f5aaae46b9 req-f318d40a-44fd-482d-a6fd-e474bded1942 service nova] Acquiring lock "0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1002.291866] env[63345]: DEBUG oslo_concurrency.lockutils [req-3a75cadd-3785-48b3-9b5b-e7f5aaae46b9 req-f318d40a-44fd-482d-a6fd-e474bded1942 service nova] Lock "0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1002.291866] env[63345]: DEBUG oslo_concurrency.lockutils [req-3a75cadd-3785-48b3-9b5b-e7f5aaae46b9 req-f318d40a-44fd-482d-a6fd-e474bded1942 service nova] Lock "0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1002.291866] env[63345]: DEBUG nova.compute.manager [req-3a75cadd-3785-48b3-9b5b-e7f5aaae46b9 req-f318d40a-44fd-482d-a6fd-e474bded1942 service nova] [instance: 0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a] No waiting events found dispatching network-vif-plugged-bca55223-b7b4-4623-abaf-4d4a68f5b7cc {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1002.292162] env[63345]: WARNING nova.compute.manager [req-3a75cadd-3785-48b3-9b5b-e7f5aaae46b9 req-f318d40a-44fd-482d-a6fd-e474bded1942 service nova] [instance: 0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a] Received unexpected event network-vif-plugged-bca55223-b7b4-4623-abaf-4d4a68f5b7cc for instance with vm_state building and task_state spawning. [ 1002.292322] env[63345]: DEBUG nova.compute.manager [req-3a75cadd-3785-48b3-9b5b-e7f5aaae46b9 req-f318d40a-44fd-482d-a6fd-e474bded1942 service nova] [instance: 0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a] Received event network-changed-bca55223-b7b4-4623-abaf-4d4a68f5b7cc {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1002.292703] env[63345]: DEBUG nova.compute.manager [req-3a75cadd-3785-48b3-9b5b-e7f5aaae46b9 req-f318d40a-44fd-482d-a6fd-e474bded1942 service nova] [instance: 0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a] Refreshing instance network info cache due to event network-changed-bca55223-b7b4-4623-abaf-4d4a68f5b7cc. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 1002.292703] env[63345]: DEBUG oslo_concurrency.lockutils [req-3a75cadd-3785-48b3-9b5b-e7f5aaae46b9 req-f318d40a-44fd-482d-a6fd-e474bded1942 service nova] Acquiring lock "refresh_cache-0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1002.292807] env[63345]: DEBUG oslo_concurrency.lockutils [req-3a75cadd-3785-48b3-9b5b-e7f5aaae46b9 req-f318d40a-44fd-482d-a6fd-e474bded1942 service nova] Acquired lock "refresh_cache-0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1002.292921] env[63345]: DEBUG nova.network.neutron [req-3a75cadd-3785-48b3-9b5b-e7f5aaae46b9 req-f318d40a-44fd-482d-a6fd-e474bded1942 service nova] [instance: 0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a] Refreshing network info cache for port bca55223-b7b4-4623-abaf-4d4a68f5b7cc {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1002.295991] env[63345]: DEBUG oslo_vmware.rw_handles [None req-abe292e0-35fe-4a5b-b16a-a911f1062318 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/526f5a24-b102-1e3b-9f2e-6fd60fcf4ff3/disk-0.vmdk. 
{{(pid=63345) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1002.297460] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26f693bf-fa2f-4071-b2c5-38f7fce00183 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.305545] env[63345]: DEBUG oslo_vmware.rw_handles [None req-abe292e0-35fe-4a5b-b16a-a911f1062318 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/526f5a24-b102-1e3b-9f2e-6fd60fcf4ff3/disk-0.vmdk is in state: ready. {{(pid=63345) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1002.305545] env[63345]: ERROR oslo_vmware.rw_handles [None req-abe292e0-35fe-4a5b-b16a-a911f1062318 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/526f5a24-b102-1e3b-9f2e-6fd60fcf4ff3/disk-0.vmdk due to incomplete transfer. [ 1002.310019] env[63345]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-f121b2b7-1c31-4b64-b5ec-cd05d145f056 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.315309] env[63345]: DEBUG oslo_vmware.rw_handles [None req-abe292e0-35fe-4a5b-b16a-a911f1062318 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/526f5a24-b102-1e3b-9f2e-6fd60fcf4ff3/disk-0.vmdk. {{(pid=63345) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1002.316656] env[63345]: DEBUG nova.virt.vmwareapi.images [None req-abe292e0-35fe-4a5b-b16a-a911f1062318 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] Uploaded image 8d5449ef-36c9-46f7-8cb0-2b5ee0ade1fd to the Glance image server {{(pid=63345) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:473}} [ 1002.317202] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-abe292e0-35fe-4a5b-b16a-a911f1062318 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] Destroying the VM {{(pid=63345) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 1002.317514] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-b3092f97-05d7-4eac-9336-42cdf9d8efd1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.324237] env[63345]: DEBUG oslo_vmware.api [None req-abe292e0-35fe-4a5b-b16a-a911f1062318 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Waiting for the task: (returnval){ [ 1002.324237] env[63345]: value = "task-1017657" [ 1002.324237] env[63345]: _type = "Task" [ 1002.324237] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.329266] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ca306e44-3946-42a1-84bd-faab172327f6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: a0eb9dae-0d27-419f-9210-eaa445e564c8] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1002.329266] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ca306e44-3946-42a1-84bd-faab172327f6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: a0eb9dae-0d27-419f-9210-eaa445e564c8] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1002.329447] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca306e44-3946-42a1-84bd-faab172327f6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Deleting the datastore file [datastore2] a0eb9dae-0d27-419f-9210-eaa445e564c8 {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1002.330215] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0547c621-a66f-4b81-98cd-f38a9b02937c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.336122] env[63345]: DEBUG oslo_vmware.api [None req-abe292e0-35fe-4a5b-b16a-a911f1062318 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Task: {'id': task-1017657, 'name': Destroy_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.340280] env[63345]: DEBUG oslo_vmware.api [None req-ca306e44-3946-42a1-84bd-faab172327f6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Waiting for the task: (returnval){ [ 1002.340280] env[63345]: value = "task-1017658" [ 1002.340280] env[63345]: _type = "Task" [ 1002.340280] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.348810] env[63345]: DEBUG oslo_vmware.api [None req-ca306e44-3946-42a1-84bd-faab172327f6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017658, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.593565] env[63345]: DEBUG nova.compute.manager [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 34990fa5-4a89-4430-8ea7-9e73dd41f441] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 1002.632923] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017654, 'name': CreateVM_Task, 'duration_secs': 0.484163} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.634224] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1002.634947] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1002.635190] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1002.635485] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1002.636009] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a95619e7-010c-42f2-ab27-30974c39acf0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.642073] env[63345]: DEBUG oslo_vmware.api [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 1002.642073] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]5243554e-78c0-1138-0750-eadc5998f1db" [ 1002.642073] env[63345]: _type = "Task" [ 1002.642073] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.649797] env[63345]: DEBUG oslo_vmware.api [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5243554e-78c0-1138-0750-eadc5998f1db, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.688532] env[63345]: DEBUG oslo_vmware.api [None req-1e7ece6b-bc63-4841-8e73-7426c4d38a0d tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017655, 'name': ReconfigVM_Task, 'duration_secs': 0.391991} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.688532] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-1e7ece6b-bc63-4841-8e73-7426c4d38a0d tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Reconfigured VM instance instance-00000042 to detach disk 2001 {{(pid=63345) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1002.692059] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2b52978f-1018-4685-af1d-7e63ff232d77 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.707371] env[63345]: DEBUG oslo_vmware.api [None req-1e7ece6b-bc63-4841-8e73-7426c4d38a0d tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Waiting for the task: (returnval){ [ 1002.707371] env[63345]: value = "task-1017659" [ 1002.707371] env[63345]: _type = "Task" [ 1002.707371] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.716060] env[63345]: DEBUG oslo_vmware.api [None req-1e7ece6b-bc63-4841-8e73-7426c4d38a0d tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017659, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.727296] env[63345]: DEBUG nova.compute.manager [None req-958f9d2f-620a-4f2b-a6c2-282bda8a46ca tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Found 1 images (rotation: 2) {{(pid=63345) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4883}} [ 1002.728268] env[63345]: DEBUG nova.compute.manager [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: a7d80763-92f0-45a9-b24b-1f973bffb376] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 1002.836931] env[63345]: DEBUG oslo_vmware.api [None req-abe292e0-35fe-4a5b-b16a-a911f1062318 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Task: {'id': task-1017657, 'name': Destroy_Task} progress is 33%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.848803] env[63345]: DEBUG oslo_vmware.api [None req-ca306e44-3946-42a1-84bd-faab172327f6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017658, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.281548} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.849097] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca306e44-3946-42a1-84bd-faab172327f6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1002.849294] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ca306e44-3946-42a1-84bd-faab172327f6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: a0eb9dae-0d27-419f-9210-eaa445e564c8] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1002.849559] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ca306e44-3946-42a1-84bd-faab172327f6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: a0eb9dae-0d27-419f-9210-eaa445e564c8] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1002.849777] env[63345]: INFO nova.compute.manager [None req-ca306e44-3946-42a1-84bd-faab172327f6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: a0eb9dae-0d27-419f-9210-eaa445e564c8] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1002.850049] env[63345]: DEBUG oslo.service.loopingcall [None req-ca306e44-3946-42a1-84bd-faab172327f6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1002.850244] env[63345]: DEBUG nova.compute.manager [-] [instance: a0eb9dae-0d27-419f-9210-eaa445e564c8] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 1002.850340] env[63345]: DEBUG nova.network.neutron [-] [instance: a0eb9dae-0d27-419f-9210-eaa445e564c8] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1002.870490] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ba8fb920-8da1-40e1-ac80-0fe917bfba95 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquiring lock "a8321259-b3a6-4e87-b13a-b964cf0dd766" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1002.870761] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ba8fb920-8da1-40e1-ac80-0fe917bfba95 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Lock "a8321259-b3a6-4e87-b13a-b964cf0dd766" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1002.870977] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ba8fb920-8da1-40e1-ac80-0fe917bfba95 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquiring lock "a8321259-b3a6-4e87-b13a-b964cf0dd766-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1002.871206] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ba8fb920-8da1-40e1-ac80-0fe917bfba95 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Lock "a8321259-b3a6-4e87-b13a-b964cf0dd766-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1002.871416] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ba8fb920-8da1-40e1-ac80-0fe917bfba95 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Lock "a8321259-b3a6-4e87-b13a-b964cf0dd766-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1002.873910] env[63345]: INFO nova.compute.manager [None req-ba8fb920-8da1-40e1-ac80-0fe917bfba95 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: a8321259-b3a6-4e87-b13a-b964cf0dd766] Terminating instance [ 1002.926164] env[63345]: DEBUG oslo_concurrency.lockutils [None req-e5a6e19e-36c5-4a15-b170-5f25384ba1a5 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Acquiring lock "3d1e47c5-7e8c-417c-8c7c-009db666d391" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1002.926164] env[63345]: DEBUG oslo_concurrency.lockutils [None req-e5a6e19e-36c5-4a15-b170-5f25384ba1a5 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Lock "3d1e47c5-7e8c-417c-8c7c-009db666d391" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1002.926164] env[63345]: DEBUG oslo_concurrency.lockutils [None req-e5a6e19e-36c5-4a15-b170-5f25384ba1a5 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Acquiring lock "3d1e47c5-7e8c-417c-8c7c-009db666d391-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1002.926164] env[63345]: DEBUG oslo_concurrency.lockutils [None req-e5a6e19e-36c5-4a15-b170-5f25384ba1a5 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Lock "3d1e47c5-7e8c-417c-8c7c-009db666d391-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1002.926164] env[63345]: DEBUG oslo_concurrency.lockutils [None req-e5a6e19e-36c5-4a15-b170-5f25384ba1a5 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Lock "3d1e47c5-7e8c-417c-8c7c-009db666d391-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1002.927185] env[63345]: INFO nova.compute.manager [None req-e5a6e19e-36c5-4a15-b170-5f25384ba1a5 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] [instance: 3d1e47c5-7e8c-417c-8c7c-009db666d391] Terminating instance [ 1003.122424] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1003.122708] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1003.125019] env[63345]: INFO nova.compute.claims [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 34990fa5-4a89-4430-8ea7-9e73dd41f441] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1003.153769] env[63345]: DEBUG oslo_vmware.api [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae 
tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5243554e-78c0-1138-0750-eadc5998f1db, 'name': SearchDatastore_Task, 'duration_secs': 0.052612} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.155102] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1003.155418] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1003.155897] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1003.156094] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1003.156311] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1003.156630] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ba4d9363-8de0-4400-ab22-f6707fa17d44 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.173502] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1003.173502] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1003.174247] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f0a90e3d-d522-474b-90d4-ce78c721ec1f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.181917] env[63345]: DEBUG oslo_vmware.api [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 1003.181917] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]5228b968-5993-faa9-d896-59c54a9666fe" [ 1003.181917] env[63345]: _type = "Task" [ 1003.181917] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.191816] env[63345]: DEBUG oslo_vmware.api [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5228b968-5993-faa9-d896-59c54a9666fe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.218469] env[63345]: DEBUG oslo_vmware.api [None req-1e7ece6b-bc63-4841-8e73-7426c4d38a0d tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017659, 'name': ReconfigVM_Task, 'duration_secs': 0.233387} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.218932] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-1e7ece6b-bc63-4841-8e73-7426c4d38a0d tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-226101', 'volume_id': '78ab0d16-eec6-45ab-badd-c9109f513975', 'name': 'volume-78ab0d16-eec6-45ab-badd-c9109f513975', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '9aa651b8-317d-4153-8c33-9df0a5d16115', 'attached_at': '', 'detached_at': '', 'volume_id': '78ab0d16-eec6-45ab-badd-c9109f513975', 'serial': '78ab0d16-eec6-45ab-badd-c9109f513975'} {{(pid=63345) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1003.250309] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1003.335634] env[63345]: DEBUG oslo_vmware.api [None req-abe292e0-35fe-4a5b-b16a-a911f1062318 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Task: {'id': task-1017657, 'name': Destroy_Task, 'duration_secs': 0.735371} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.335910] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-abe292e0-35fe-4a5b-b16a-a911f1062318 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] Destroyed the VM [ 1003.336203] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-abe292e0-35fe-4a5b-b16a-a911f1062318 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] Deleting Snapshot of the VM instance {{(pid=63345) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1003.336439] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-cc233724-6a6a-436e-9d02-a65db3882fb7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.342596] env[63345]: DEBUG oslo_vmware.api [None req-abe292e0-35fe-4a5b-b16a-a911f1062318 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Waiting for the task: (returnval){ [ 1003.342596] env[63345]: value = "task-1017660" [ 1003.342596] env[63345]: _type = "Task" [ 1003.342596] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.355533] env[63345]: DEBUG oslo_vmware.api [None req-abe292e0-35fe-4a5b-b16a-a911f1062318 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Task: {'id': task-1017660, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.377979] env[63345]: DEBUG nova.compute.manager [None req-ba8fb920-8da1-40e1-ac80-0fe917bfba95 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: a8321259-b3a6-4e87-b13a-b964cf0dd766] Start destroying the instance on the hypervisor. 
{{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 1003.378405] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ba8fb920-8da1-40e1-ac80-0fe917bfba95 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: a8321259-b3a6-4e87-b13a-b964cf0dd766] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1003.380068] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5bf5c55-c753-418b-a0e4-806d2ce73901 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.392640] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba8fb920-8da1-40e1-ac80-0fe917bfba95 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: a8321259-b3a6-4e87-b13a-b964cf0dd766] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1003.392640] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1b77e2c8-cfb0-45da-ba66-236347ba0ccc {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.402771] env[63345]: DEBUG oslo_vmware.api [None req-ba8fb920-8da1-40e1-ac80-0fe917bfba95 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Waiting for the task: (returnval){ [ 1003.402771] env[63345]: value = "task-1017661" [ 1003.402771] env[63345]: _type = "Task" [ 1003.402771] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.413253] env[63345]: DEBUG nova.compute.manager [req-451130c5-0ee6-4442-ae76-b4e6201ac590 req-4000e5fa-de00-406e-b3df-b1b174d6c82e service nova] [instance: a0eb9dae-0d27-419f-9210-eaa445e564c8] Received event network-vif-deleted-bbb77ecb-8d67-4f41-9505-2571225c8480 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1003.414211] env[63345]: INFO nova.compute.manager [req-451130c5-0ee6-4442-ae76-b4e6201ac590 req-4000e5fa-de00-406e-b3df-b1b174d6c82e service nova] [instance: a0eb9dae-0d27-419f-9210-eaa445e564c8] Neutron deleted interface bbb77ecb-8d67-4f41-9505-2571225c8480; detaching it from the instance and deleting it from the info cache [ 1003.414211] env[63345]: DEBUG nova.network.neutron [req-451130c5-0ee6-4442-ae76-b4e6201ac590 req-4000e5fa-de00-406e-b3df-b1b174d6c82e service nova] [instance: a0eb9dae-0d27-419f-9210-eaa445e564c8] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1003.426610] env[63345]: DEBUG oslo_vmware.api [None req-ba8fb920-8da1-40e1-ac80-0fe917bfba95 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017661, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.427562] env[63345]: DEBUG nova.network.neutron [req-3a75cadd-3785-48b3-9b5b-e7f5aaae46b9 req-f318d40a-44fd-482d-a6fd-e474bded1942 service nova] [instance: 0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a] Updated VIF entry in instance network info cache for port bca55223-b7b4-4623-abaf-4d4a68f5b7cc. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1003.427680] env[63345]: DEBUG nova.network.neutron [req-3a75cadd-3785-48b3-9b5b-e7f5aaae46b9 req-f318d40a-44fd-482d-a6fd-e474bded1942 service nova] [instance: 0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a] Updating instance_info_cache with network_info: [{"id": "bca55223-b7b4-4623-abaf-4d4a68f5b7cc", "address": "fa:16:3e:6d:80:f9", "network": {"id": "18b67684-3f06-4f15-be40-ba0b2769b248", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1680877425-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cb91ecf5d00e48dea9baf2122ac4fed7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "68add7d6-c025-46fa-84d3-9c589adb63e4", "external-id": "nsx-vlan-transportzone-961", "segmentation_id": 961, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbca55223-b7", "ovs_interfaceid": "bca55223-b7b4-4623-abaf-4d4a68f5b7cc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1003.432622] env[63345]: DEBUG nova.compute.manager [None req-e5a6e19e-36c5-4a15-b170-5f25384ba1a5 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] [instance: 3d1e47c5-7e8c-417c-8c7c-009db666d391] Start destroying the instance on the hypervisor. 
{{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 1003.432823] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-e5a6e19e-36c5-4a15-b170-5f25384ba1a5 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] [instance: 3d1e47c5-7e8c-417c-8c7c-009db666d391] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1003.434387] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5705266f-6ede-4f89-9410-41df57f09c36 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.444808] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5a6e19e-36c5-4a15-b170-5f25384ba1a5 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] [instance: 3d1e47c5-7e8c-417c-8c7c-009db666d391] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1003.445108] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-433c72b1-3d51-4580-82e4-6e573c024615 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.453078] env[63345]: DEBUG oslo_vmware.api [None req-e5a6e19e-36c5-4a15-b170-5f25384ba1a5 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Waiting for the task: (returnval){ [ 1003.453078] env[63345]: value = "task-1017662" [ 1003.453078] env[63345]: _type = "Task" [ 1003.453078] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.461896] env[63345]: DEBUG oslo_vmware.api [None req-e5a6e19e-36c5-4a15-b170-5f25384ba1a5 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Task: {'id': task-1017662, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.692834] env[63345]: DEBUG oslo_vmware.api [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5228b968-5993-faa9-d896-59c54a9666fe, 'name': SearchDatastore_Task, 'duration_secs': 0.01846} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.693693] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-64768907-dd8a-427d-92f3-9aa10807e109 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.699516] env[63345]: DEBUG oslo_vmware.api [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 1003.699516] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52c3f297-097c-00cd-8bd3-d39799d40fd9" [ 1003.699516] env[63345]: _type = "Task" [ 1003.699516] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.709127] env[63345]: DEBUG oslo_vmware.api [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52c3f297-097c-00cd-8bd3-d39799d40fd9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.777519] env[63345]: DEBUG nova.objects.instance [None req-1e7ece6b-bc63-4841-8e73-7426c4d38a0d tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Lazy-loading 'flavor' on Instance uuid 9aa651b8-317d-4153-8c33-9df0a5d16115 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1003.849206] env[63345]: DEBUG nova.network.neutron [-] [instance: a0eb9dae-0d27-419f-9210-eaa445e564c8] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1003.854620] env[63345]: DEBUG oslo_vmware.api [None req-abe292e0-35fe-4a5b-b16a-a911f1062318 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Task: {'id': task-1017660, 'name': RemoveSnapshot_Task} progress is 15%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.913351] env[63345]: DEBUG oslo_vmware.api [None req-ba8fb920-8da1-40e1-ac80-0fe917bfba95 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017661, 'name': PowerOffVM_Task, 'duration_secs': 0.319591} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.913787] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba8fb920-8da1-40e1-ac80-0fe917bfba95 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: a8321259-b3a6-4e87-b13a-b964cf0dd766] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1003.914089] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ba8fb920-8da1-40e1-ac80-0fe917bfba95 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: a8321259-b3a6-4e87-b13a-b964cf0dd766] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1003.914472] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4144b515-4fb7-4cc1-945b-a735aef26e92 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.918337] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-843f9c83-33e0-41a4-b81f-60ce93947c6e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.929845] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d26f837-6b59-40bf-a31b-26c6eac7cf9a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.942491] env[63345]: DEBUG oslo_concurrency.lockutils [req-3a75cadd-3785-48b3-9b5b-e7f5aaae46b9 req-f318d40a-44fd-482d-a6fd-e474bded1942 service nova] Releasing lock "refresh_cache-0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1003.942783] env[63345]: DEBUG nova.compute.manager [req-3a75cadd-3785-48b3-9b5b-e7f5aaae46b9 req-f318d40a-44fd-482d-a6fd-e474bded1942 service nova] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Received event network-changed-8a3e5f64-f812-4c1b-a9e0-b8b3146a1467 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1003.942960] env[63345]: DEBUG nova.compute.manager [req-3a75cadd-3785-48b3-9b5b-e7f5aaae46b9 req-f318d40a-44fd-482d-a6fd-e474bded1942 service nova] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Refreshing instance network info cache due to event network-changed-8a3e5f64-f812-4c1b-a9e0-b8b3146a1467. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 1003.943199] env[63345]: DEBUG oslo_concurrency.lockutils [req-3a75cadd-3785-48b3-9b5b-e7f5aaae46b9 req-f318d40a-44fd-482d-a6fd-e474bded1942 service nova] Acquiring lock "refresh_cache-dd624e54-bd5b-4660-88a1-9d6f36560421" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1003.943346] env[63345]: DEBUG oslo_concurrency.lockutils [req-3a75cadd-3785-48b3-9b5b-e7f5aaae46b9 req-f318d40a-44fd-482d-a6fd-e474bded1942 service nova] Acquired lock "refresh_cache-dd624e54-bd5b-4660-88a1-9d6f36560421" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1003.943517] env[63345]: DEBUG nova.network.neutron [req-3a75cadd-3785-48b3-9b5b-e7f5aaae46b9 req-f318d40a-44fd-482d-a6fd-e474bded1942 service nova] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Refreshing network info cache for port 8a3e5f64-f812-4c1b-a9e0-b8b3146a1467 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1003.967614] env[63345]: DEBUG nova.compute.manager [req-451130c5-0ee6-4442-ae76-b4e6201ac590 req-4000e5fa-de00-406e-b3df-b1b174d6c82e service nova] [instance: a0eb9dae-0d27-419f-9210-eaa445e564c8] Detach interface failed, port_id=bbb77ecb-8d67-4f41-9505-2571225c8480, reason: Instance a0eb9dae-0d27-419f-9210-eaa445e564c8 could not be found. {{(pid=63345) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 1003.974433] env[63345]: DEBUG oslo_vmware.api [None req-e5a6e19e-36c5-4a15-b170-5f25384ba1a5 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Task: {'id': task-1017662, 'name': PowerOffVM_Task, 'duration_secs': 0.181466} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.974433] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5a6e19e-36c5-4a15-b170-5f25384ba1a5 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] [instance: 3d1e47c5-7e8c-417c-8c7c-009db666d391] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1003.974433] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-e5a6e19e-36c5-4a15-b170-5f25384ba1a5 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] [instance: 3d1e47c5-7e8c-417c-8c7c-009db666d391] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1003.974433] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e092f67c-26f4-4147-9351-ee798f7d92cc {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.994146] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ba8fb920-8da1-40e1-ac80-0fe917bfba95 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: a8321259-b3a6-4e87-b13a-b964cf0dd766] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1003.994408] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ba8fb920-8da1-40e1-ac80-0fe917bfba95 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: a8321259-b3a6-4e87-b13a-b964cf0dd766] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1003.994601] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba8fb920-8da1-40e1-ac80-0fe917bfba95 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Deleting the datastore file [datastore2] a8321259-b3a6-4e87-b13a-b964cf0dd766 {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1003.995409] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-491da22d-0335-4825-b3b3-97e6d02d55dc {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.001720] env[63345]: DEBUG oslo_vmware.api [None req-ba8fb920-8da1-40e1-ac80-0fe917bfba95 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Waiting for the task: (returnval){ [ 1004.001720] env[63345]: value = "task-1017665" [ 1004.001720] env[63345]: _type = "Task" [ 1004.001720] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.010813] env[63345]: DEBUG oslo_vmware.api [None req-ba8fb920-8da1-40e1-ac80-0fe917bfba95 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017665, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.045105] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-e5a6e19e-36c5-4a15-b170-5f25384ba1a5 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] [instance: 3d1e47c5-7e8c-417c-8c7c-009db666d391] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1004.045386] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-e5a6e19e-36c5-4a15-b170-5f25384ba1a5 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] [instance: 3d1e47c5-7e8c-417c-8c7c-009db666d391] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1004.045597] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-e5a6e19e-36c5-4a15-b170-5f25384ba1a5 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Deleting the datastore file [datastore2] 3d1e47c5-7e8c-417c-8c7c-009db666d391 {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1004.045910] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4ef1b787-2b88-4e44-98f6-c45c9dbc4d2b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.054956] env[63345]: DEBUG oslo_vmware.api [None req-e5a6e19e-36c5-4a15-b170-5f25384ba1a5 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Waiting for the task: (returnval){ [ 1004.054956] env[63345]: value = "task-1017666" [ 1004.054956] env[63345]: _type = "Task" [ 1004.054956] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.065965] env[63345]: DEBUG oslo_vmware.api [None req-e5a6e19e-36c5-4a15-b170-5f25384ba1a5 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Task: {'id': task-1017666, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.209954] env[63345]: DEBUG oslo_vmware.api [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52c3f297-097c-00cd-8bd3-d39799d40fd9, 'name': SearchDatastore_Task, 'duration_secs': 0.013817} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.210493] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1004.210623] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a/0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1004.210817] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-32107130-bb3e-48ca-9469-600d666cb3a0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.218090] env[63345]: DEBUG oslo_vmware.api [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 1004.218090] env[63345]: value = "task-1017667" [ 1004.218090] env[63345]: _type = "Task" [ 1004.218090] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.225664] env[63345]: DEBUG oslo_vmware.api [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017667, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.321210] env[63345]: DEBUG nova.compute.manager [req-bead40e2-e280-4255-b4a8-8a147270f5f2 req-2a4d3c60-cee0-4e26-9fee-758fde144f87 service nova] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Received event network-changed-d6e5e759-86e1-4f76-9b65-19b2691780df {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1004.321423] env[63345]: DEBUG nova.compute.manager [req-bead40e2-e280-4255-b4a8-8a147270f5f2 req-2a4d3c60-cee0-4e26-9fee-758fde144f87 service nova] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Refreshing instance network info cache due to event network-changed-d6e5e759-86e1-4f76-9b65-19b2691780df. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 1004.321741] env[63345]: DEBUG oslo_concurrency.lockutils [req-bead40e2-e280-4255-b4a8-8a147270f5f2 req-2a4d3c60-cee0-4e26-9fee-758fde144f87 service nova] Acquiring lock "refresh_cache-7057cdfc-a6d9-4e52-b650-6a5709d5f8c2" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1004.321809] env[63345]: DEBUG oslo_concurrency.lockutils [req-bead40e2-e280-4255-b4a8-8a147270f5f2 req-2a4d3c60-cee0-4e26-9fee-758fde144f87 service nova] Acquired lock "refresh_cache-7057cdfc-a6d9-4e52-b650-6a5709d5f8c2" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1004.321961] env[63345]: DEBUG nova.network.neutron [req-bead40e2-e280-4255-b4a8-8a147270f5f2 req-2a4d3c60-cee0-4e26-9fee-758fde144f87 service nova] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Refreshing network info cache for port d6e5e759-86e1-4f76-9b65-19b2691780df {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1004.332632] env[63345]: DEBUG nova.compute.manager [None req-a218ce07-ac78-4a72-9854-e8fb6b118af6 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1004.333646] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff0aa62d-66fe-4b4e-bead-a3c70a08b4ae {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.350910] env[63345]: INFO nova.compute.manager [-] [instance: a0eb9dae-0d27-419f-9210-eaa445e564c8] Took 1.50 seconds to deallocate network for instance. [ 1004.360387] env[63345]: DEBUG oslo_vmware.api [None req-abe292e0-35fe-4a5b-b16a-a911f1062318 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Task: {'id': task-1017660, 'name': RemoveSnapshot_Task} progress is 56%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.385572] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60d7e38e-44e0-4141-aa17-eb257aa7656f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.396343] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39adea6f-64be-4894-a277-4b9250640be0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.431054] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c3447d5-bb0f-4801-992c-f99369ad0040 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.441403] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ad76667-f0dd-4f96-a1bd-9b6b4efe08e9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.460564] env[63345]: DEBUG nova.compute.provider_tree [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1004.512869] env[63345]: DEBUG oslo_vmware.api [None req-ba8fb920-8da1-40e1-ac80-0fe917bfba95 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Task: {'id': task-1017665, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.166905} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.513181] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba8fb920-8da1-40e1-ac80-0fe917bfba95 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1004.513401] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ba8fb920-8da1-40e1-ac80-0fe917bfba95 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: a8321259-b3a6-4e87-b13a-b964cf0dd766] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1004.513592] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ba8fb920-8da1-40e1-ac80-0fe917bfba95 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: a8321259-b3a6-4e87-b13a-b964cf0dd766] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1004.513776] env[63345]: INFO nova.compute.manager [None req-ba8fb920-8da1-40e1-ac80-0fe917bfba95 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] [instance: a8321259-b3a6-4e87-b13a-b964cf0dd766] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1004.514162] env[63345]: DEBUG oslo.service.loopingcall [None req-ba8fb920-8da1-40e1-ac80-0fe917bfba95 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1004.514265] env[63345]: DEBUG nova.compute.manager [-] [instance: a8321259-b3a6-4e87-b13a-b964cf0dd766] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 1004.514363] env[63345]: DEBUG nova.network.neutron [-] [instance: a8321259-b3a6-4e87-b13a-b964cf0dd766] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1004.566620] env[63345]: DEBUG oslo_vmware.api [None req-e5a6e19e-36c5-4a15-b170-5f25384ba1a5 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Task: {'id': task-1017666, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.156087} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.566911] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-e5a6e19e-36c5-4a15-b170-5f25384ba1a5 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1004.567210] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-e5a6e19e-36c5-4a15-b170-5f25384ba1a5 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] [instance: 3d1e47c5-7e8c-417c-8c7c-009db666d391] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1004.567330] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-e5a6e19e-36c5-4a15-b170-5f25384ba1a5 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] [instance: 3d1e47c5-7e8c-417c-8c7c-009db666d391] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1004.568684] env[63345]: INFO nova.compute.manager [None req-e5a6e19e-36c5-4a15-b170-5f25384ba1a5 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] [instance: 3d1e47c5-7e8c-417c-8c7c-009db666d391] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1004.568684] env[63345]: DEBUG oslo.service.loopingcall [None req-e5a6e19e-36c5-4a15-b170-5f25384ba1a5 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1004.568684] env[63345]: DEBUG nova.compute.manager [-] [instance: 3d1e47c5-7e8c-417c-8c7c-009db666d391] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 1004.568684] env[63345]: DEBUG nova.network.neutron [-] [instance: 3d1e47c5-7e8c-417c-8c7c-009db666d391] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1004.729146] env[63345]: DEBUG oslo_vmware.api [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017667, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.788428] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1e7ece6b-bc63-4841-8e73-7426c4d38a0d tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Lock "9aa651b8-317d-4153-8c33-9df0a5d16115" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.319s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1004.838859] env[63345]: DEBUG nova.network.neutron [req-3a75cadd-3785-48b3-9b5b-e7f5aaae46b9 req-f318d40a-44fd-482d-a6fd-e474bded1942 service nova] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Updated VIF entry in instance network info cache for port 8a3e5f64-f812-4c1b-a9e0-b8b3146a1467. {{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1004.839285] env[63345]: DEBUG nova.network.neutron [req-3a75cadd-3785-48b3-9b5b-e7f5aaae46b9 req-f318d40a-44fd-482d-a6fd-e474bded1942 service nova] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Updating instance_info_cache with network_info: [{"id": "8a3e5f64-f812-4c1b-a9e0-b8b3146a1467", "address": "fa:16:3e:55:5a:7c", "network": {"id": "b360ab0d-3deb-4632-a8d5-c1639db9e9e2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2015660260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.225", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33c28bfca4da460e8ca96dc7519204c8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f35e69ef-c2c8-4b8c-9887-33e97b242c0a", "external-id": "nsx-vlan-transportzone-969", "segmentation_id": 969, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8a3e5f64-f8", "ovs_interfaceid": "8a3e5f64-f812-4c1b-a9e0-b8b3146a1467", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1004.851660] env[63345]: INFO nova.compute.manager [None req-a218ce07-ac78-4a72-9854-e8fb6b118af6 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] instance snapshotting [ 1004.852257] env[63345]: DEBUG nova.objects.instance [None req-a218ce07-ac78-4a72-9854-e8fb6b118af6 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lazy-loading 'flavor' on Instance uuid 726332dd-8699-49a4-a9ea-b9cbfc159855 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1004.860054] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ca306e44-3946-42a1-84bd-faab172327f6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1004.860482] env[63345]: DEBUG oslo_vmware.api [None req-abe292e0-35fe-4a5b-b16a-a911f1062318 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Task: {'id': task-1017660, 'name': RemoveSnapshot_Task, 'duration_secs': 1.256102} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.861382] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-abe292e0-35fe-4a5b-b16a-a911f1062318 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] Deleted Snapshot of the VM instance {{(pid=63345) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1004.861747] env[63345]: INFO nova.compute.manager [None req-abe292e0-35fe-4a5b-b16a-a911f1062318 tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] Took 17.65 seconds to snapshot the instance on the hypervisor. [ 1004.981553] env[63345]: ERROR nova.scheduler.client.report [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [req-4a5b8618-6bb3-40f0-b811-9636d13d7852] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID fc35ddde-c15e-4ab8-bf77-a06ae0805b57. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-4a5b8618-6bb3-40f0-b811-9636d13d7852"}]} [ 1004.997882] env[63345]: DEBUG nova.scheduler.client.report [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Refreshing inventories for resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1005.019291] env[63345]: DEBUG nova.scheduler.client.report [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Updating ProviderTree inventory for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1005.019533] env[63345]: DEBUG nova.compute.provider_tree [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1005.032352] env[63345]: DEBUG nova.scheduler.client.report [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Refreshing aggregate associations for resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57, aggregates: None {{(pid=63345) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1005.040982] env[63345]: DEBUG nova.network.neutron [req-bead40e2-e280-4255-b4a8-8a147270f5f2 req-2a4d3c60-cee0-4e26-9fee-758fde144f87 service nova] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Updated VIF entry in instance network info cache for port d6e5e759-86e1-4f76-9b65-19b2691780df. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1005.041372] env[63345]: DEBUG nova.network.neutron [req-bead40e2-e280-4255-b4a8-8a147270f5f2 req-2a4d3c60-cee0-4e26-9fee-758fde144f87 service nova] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Updating instance_info_cache with network_info: [{"id": "d6e5e759-86e1-4f76-9b65-19b2691780df", "address": "fa:16:3e:21:49:6e", "network": {"id": "b360ab0d-3deb-4632-a8d5-c1639db9e9e2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2015660260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33c28bfca4da460e8ca96dc7519204c8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f35e69ef-c2c8-4b8c-9887-33e97b242c0a", "external-id": "nsx-vlan-transportzone-969", "segmentation_id": 969, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd6e5e759-86", "ovs_interfaceid": "d6e5e759-86e1-4f76-9b65-19b2691780df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1005.056017] env[63345]: DEBUG nova.scheduler.client.report [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Refreshing trait associations for resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=63345) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1005.229324] env[63345]: DEBUG oslo_vmware.api [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017667, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.53327} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.229569] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a/0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1005.229846] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1005.230145] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-153bc01c-89a8-4bf4-b8d7-49f939196c90 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.237804] env[63345]: DEBUG oslo_vmware.api [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 1005.237804] env[63345]: value = "task-1017668" [ 1005.237804] env[63345]: _type = "Task" [ 1005.237804] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.247536] env[63345]: DEBUG oslo_vmware.api [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017668, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.259604] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5b88f40-8eb5-40de-a743-3490cade4ac5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.266219] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b3b5ab9-1d06-46fc-a98a-26c8b88d65cd {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.297029] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4d09b81-aa69-4e90-bb02-3f09548019eb {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.303817] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dfa127b-b880-48a2-91f9-38c589c6c6b8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.307797] env[63345]: DEBUG nova.network.neutron [-] [instance: a8321259-b3a6-4e87-b13a-b964cf0dd766] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1005.318139] env[63345]: DEBUG nova.compute.provider_tree [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1005.341678] env[63345]: DEBUG oslo_concurrency.lockutils [req-3a75cadd-3785-48b3-9b5b-e7f5aaae46b9 req-f318d40a-44fd-482d-a6fd-e474bded1942 service nova] Releasing lock "refresh_cache-dd624e54-bd5b-4660-88a1-9d6f36560421" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1005.341944] env[63345]: DEBUG nova.compute.manager [req-3a75cadd-3785-48b3-9b5b-e7f5aaae46b9 req-f318d40a-44fd-482d-a6fd-e474bded1942 service nova] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Received event network-changed-d6e5e759-86e1-4f76-9b65-19b2691780df {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1005.342197] env[63345]: DEBUG nova.compute.manager [req-3a75cadd-3785-48b3-9b5b-e7f5aaae46b9 req-f318d40a-44fd-482d-a6fd-e474bded1942 service nova] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Refreshing instance network info cache due to event network-changed-d6e5e759-86e1-4f76-9b65-19b2691780df. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 1005.342415] env[63345]: DEBUG oslo_concurrency.lockutils [req-3a75cadd-3785-48b3-9b5b-e7f5aaae46b9 req-f318d40a-44fd-482d-a6fd-e474bded1942 service nova] Acquiring lock "refresh_cache-7057cdfc-a6d9-4e52-b650-6a5709d5f8c2" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1005.364115] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-341fea9f-6d84-4f9f-9dda-526f7656c6ca {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.385412] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d876ea50-02e8-496f-9f49-829fa6325307 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.439155] env[63345]: DEBUG nova.compute.manager [req-93ed88b4-3de4-4889-b463-f0db3394c43c req-9322544c-b2e9-4f88-932c-046c29315d49 service nova] [instance: a8321259-b3a6-4e87-b13a-b964cf0dd766] Received event network-vif-deleted-8aa59061-70d8-466e-83b6-d91bcc0101d2 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1005.441819] env[63345]: DEBUG nova.network.neutron [-] [instance: 3d1e47c5-7e8c-417c-8c7c-009db666d391] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1005.545351] env[63345]: DEBUG oslo_concurrency.lockutils [req-bead40e2-e280-4255-b4a8-8a147270f5f2 req-2a4d3c60-cee0-4e26-9fee-758fde144f87 service nova] Releasing lock "refresh_cache-7057cdfc-a6d9-4e52-b650-6a5709d5f8c2" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1005.545863] env[63345]: DEBUG oslo_concurrency.lockutils [req-3a75cadd-3785-48b3-9b5b-e7f5aaae46b9 req-f318d40a-44fd-482d-a6fd-e474bded1942 service nova] Acquired lock "refresh_cache-7057cdfc-a6d9-4e52-b650-6a5709d5f8c2" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1005.546135] env[63345]: DEBUG nova.network.neutron [req-3a75cadd-3785-48b3-9b5b-e7f5aaae46b9 req-f318d40a-44fd-482d-a6fd-e474bded1942 service nova] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Refreshing network info cache for port d6e5e759-86e1-4f76-9b65-19b2691780df {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1005.568720] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a2641803-f1fc-4e6f-9ecc-5bc8f29019e6 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Acquiring lock "9aa651b8-317d-4153-8c33-9df0a5d16115" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1005.568720] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a2641803-f1fc-4e6f-9ecc-5bc8f29019e6 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Lock "9aa651b8-317d-4153-8c33-9df0a5d16115" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1005.568849] env[63345]: 
DEBUG oslo_concurrency.lockutils [None req-a2641803-f1fc-4e6f-9ecc-5bc8f29019e6 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Acquiring lock "9aa651b8-317d-4153-8c33-9df0a5d16115-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1005.568924] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a2641803-f1fc-4e6f-9ecc-5bc8f29019e6 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Lock "9aa651b8-317d-4153-8c33-9df0a5d16115-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1005.569120] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a2641803-f1fc-4e6f-9ecc-5bc8f29019e6 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Lock "9aa651b8-317d-4153-8c33-9df0a5d16115-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1005.571104] env[63345]: INFO nova.compute.manager [None req-a2641803-f1fc-4e6f-9ecc-5bc8f29019e6 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Terminating instance [ 1005.748129] env[63345]: DEBUG oslo_vmware.api [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017668, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063466} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.748521] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1005.749211] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62a76bea-016b-4869-bc5e-ce43a6668e30 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.772282] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a] Reconfiguring VM instance instance-00000064 to attach disk [datastore2] 0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a/0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1005.772571] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f6ec7da1-40dd-46c8-b0ba-f5d464c98bf7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.791353] env[63345]: DEBUG oslo_vmware.api [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 1005.791353] env[63345]: value = "task-1017669" [ 1005.791353] env[63345]: _type = "Task" [ 1005.791353] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.799089] env[63345]: DEBUG oslo_vmware.api [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017669, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.810806] env[63345]: INFO nova.compute.manager [-] [instance: a8321259-b3a6-4e87-b13a-b964cf0dd766] Took 1.30 seconds to deallocate network for instance. 
[ 1005.822175] env[63345]: DEBUG nova.scheduler.client.report [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1005.896263] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a218ce07-ac78-4a72-9854-e8fb6b118af6 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Creating Snapshot of the VM instance {{(pid=63345) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1005.896582] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-fa1969c7-9470-4ec3-a048-37031b9777b1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.903685] env[63345]: DEBUG oslo_vmware.api [None req-a218ce07-ac78-4a72-9854-e8fb6b118af6 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 1005.903685] env[63345]: value = "task-1017670" [ 1005.903685] env[63345]: _type = "Task" [ 1005.903685] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.912197] env[63345]: DEBUG oslo_vmware.api [None req-a218ce07-ac78-4a72-9854-e8fb6b118af6 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017670, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.943969] env[63345]: INFO nova.compute.manager [-] [instance: 3d1e47c5-7e8c-417c-8c7c-009db666d391] Took 1.38 seconds to deallocate network for instance. [ 1006.074652] env[63345]: DEBUG nova.compute.manager [None req-a2641803-f1fc-4e6f-9ecc-5bc8f29019e6 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Start destroying the instance on the hypervisor. 
{{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 1006.074906] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a2641803-f1fc-4e6f-9ecc-5bc8f29019e6 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1006.076335] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-632b5138-11c8-4094-8ccc-8a6d7b969396 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.085870] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2641803-f1fc-4e6f-9ecc-5bc8f29019e6 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1006.086506] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ca431b69-eb33-4f65-b8cb-36d22e29c0e8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.093250] env[63345]: DEBUG oslo_vmware.api [None req-a2641803-f1fc-4e6f-9ecc-5bc8f29019e6 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Waiting for the task: (returnval){ [ 1006.093250] env[63345]: value = "task-1017671" [ 1006.093250] env[63345]: _type = "Task" [ 1006.093250] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.102135] env[63345]: DEBUG oslo_vmware.api [None req-a2641803-f1fc-4e6f-9ecc-5bc8f29019e6 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017671, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.264704] env[63345]: DEBUG nova.network.neutron [req-3a75cadd-3785-48b3-9b5b-e7f5aaae46b9 req-f318d40a-44fd-482d-a6fd-e474bded1942 service nova] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Updated VIF entry in instance network info cache for port d6e5e759-86e1-4f76-9b65-19b2691780df. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1006.265200] env[63345]: DEBUG nova.network.neutron [req-3a75cadd-3785-48b3-9b5b-e7f5aaae46b9 req-f318d40a-44fd-482d-a6fd-e474bded1942 service nova] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Updating instance_info_cache with network_info: [{"id": "d6e5e759-86e1-4f76-9b65-19b2691780df", "address": "fa:16:3e:21:49:6e", "network": {"id": "b360ab0d-3deb-4632-a8d5-c1639db9e9e2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2015660260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33c28bfca4da460e8ca96dc7519204c8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f35e69ef-c2c8-4b8c-9887-33e97b242c0a", "external-id": "nsx-vlan-transportzone-969", "segmentation_id": 969, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd6e5e759-86", "ovs_interfaceid": "d6e5e759-86e1-4f76-9b65-19b2691780df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1006.303788] env[63345]: DEBUG oslo_vmware.api [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017669, 'name': ReconfigVM_Task, 'duration_secs': 0.308927} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.304125] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a] Reconfigured VM instance instance-00000064 to attach disk [datastore2] 0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a/0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1006.304742] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c3fe930f-faff-4cad-aac1-072c2a8a146a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.310930] env[63345]: DEBUG oslo_vmware.api [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 1006.310930] env[63345]: value = "task-1017672" [ 1006.310930] env[63345]: _type = "Task" [ 1006.310930] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.319783] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ba8fb920-8da1-40e1-ac80-0fe917bfba95 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1006.320092] env[63345]: DEBUG oslo_vmware.api [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017672, 'name': Rename_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.326222] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.203s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1006.326753] env[63345]: DEBUG nova.compute.manager [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 34990fa5-4a89-4430-8ea7-9e73dd41f441] Start building networks asynchronously for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 1006.329343] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.079s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1006.330809] env[63345]: INFO nova.compute.claims [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: a7d80763-92f0-45a9-b24b-1f973bffb376] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1006.369141] env[63345]: DEBUG nova.compute.manager [req-5efeb3ca-2e82-415d-8e8d-7d5dbe56f600 req-f88b433c-f94c-409b-a7b9-9cedf4489d72 service nova] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Received event network-changed-8a3e5f64-f812-4c1b-a9e0-b8b3146a1467 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1006.369141] env[63345]: DEBUG nova.compute.manager [req-5efeb3ca-2e82-415d-8e8d-7d5dbe56f600 req-f88b433c-f94c-409b-a7b9-9cedf4489d72 service nova] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Refreshing instance network info cache due to event network-changed-8a3e5f64-f812-4c1b-a9e0-b8b3146a1467. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 1006.369141] env[63345]: DEBUG oslo_concurrency.lockutils [req-5efeb3ca-2e82-415d-8e8d-7d5dbe56f600 req-f88b433c-f94c-409b-a7b9-9cedf4489d72 service nova] Acquiring lock "refresh_cache-dd624e54-bd5b-4660-88a1-9d6f36560421" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1006.369141] env[63345]: DEBUG oslo_concurrency.lockutils [req-5efeb3ca-2e82-415d-8e8d-7d5dbe56f600 req-f88b433c-f94c-409b-a7b9-9cedf4489d72 service nova] Acquired lock "refresh_cache-dd624e54-bd5b-4660-88a1-9d6f36560421" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1006.369692] env[63345]: DEBUG nova.network.neutron [req-5efeb3ca-2e82-415d-8e8d-7d5dbe56f600 req-f88b433c-f94c-409b-a7b9-9cedf4489d72 service nova] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Refreshing network info cache for port 8a3e5f64-f812-4c1b-a9e0-b8b3146a1467 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1006.413974] env[63345]: DEBUG oslo_vmware.api [None req-a218ce07-ac78-4a72-9854-e8fb6b118af6 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017670, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.450018] env[63345]: DEBUG oslo_concurrency.lockutils [None req-e5a6e19e-36c5-4a15-b170-5f25384ba1a5 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1006.602783] env[63345]: DEBUG oslo_vmware.api [None req-a2641803-f1fc-4e6f-9ecc-5bc8f29019e6 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017671, 'name': PowerOffVM_Task, 'duration_secs': 0.232665} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.603093] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2641803-f1fc-4e6f-9ecc-5bc8f29019e6 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1006.603308] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a2641803-f1fc-4e6f-9ecc-5bc8f29019e6 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1006.603613] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0394b459-1771-4e0d-b864-26761d748249 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.678398] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a2641803-f1fc-4e6f-9ecc-5bc8f29019e6 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1006.678617] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a2641803-f1fc-4e6f-9ecc-5bc8f29019e6 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1006.678806] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-a2641803-f1fc-4e6f-9ecc-5bc8f29019e6 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Deleting the datastore file [datastore2] 9aa651b8-317d-4153-8c33-9df0a5d16115 {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1006.679095] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b880f191-1d94-4f51-bf69-cd83f2dfa15d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.685720] env[63345]: DEBUG oslo_vmware.api [None req-a2641803-f1fc-4e6f-9ecc-5bc8f29019e6 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Waiting for the task: (returnval){ [ 1006.685720] env[63345]: value = "task-1017674" [ 1006.685720] env[63345]: _type = "Task" [ 1006.685720] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.689484] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9cc41c77-46c3-4303-9c42-4251247bd5cb tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Acquiring lock "b3f20003-f75d-4d9f-bb4a-02d2930054a8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1006.689709] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9cc41c77-46c3-4303-9c42-4251247bd5cb tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Lock "b3f20003-f75d-4d9f-bb4a-02d2930054a8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1006.689919] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9cc41c77-46c3-4303-9c42-4251247bd5cb tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Acquiring lock "b3f20003-f75d-4d9f-bb4a-02d2930054a8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1006.690133] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9cc41c77-46c3-4303-9c42-4251247bd5cb tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Lock "b3f20003-f75d-4d9f-bb4a-02d2930054a8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1006.690300] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9cc41c77-46c3-4303-9c42-4251247bd5cb tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Lock "b3f20003-f75d-4d9f-bb4a-02d2930054a8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1006.694769] env[63345]: DEBUG oslo_vmware.api [None req-a2641803-f1fc-4e6f-9ecc-5bc8f29019e6 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017674, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.695235] env[63345]: INFO nova.compute.manager [None req-9cc41c77-46c3-4303-9c42-4251247bd5cb tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] Terminating instance [ 1006.767724] env[63345]: DEBUG oslo_concurrency.lockutils [req-3a75cadd-3785-48b3-9b5b-e7f5aaae46b9 req-f318d40a-44fd-482d-a6fd-e474bded1942 service nova] Releasing lock "refresh_cache-7057cdfc-a6d9-4e52-b650-6a5709d5f8c2" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1006.820723] env[63345]: DEBUG oslo_vmware.api [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017672, 'name': Rename_Task, 'duration_secs': 0.151858} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.820999] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1006.821259] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-426ce747-e295-44da-809b-3ef136d75790 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.828209] env[63345]: DEBUG oslo_vmware.api [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 1006.828209] env[63345]: value = "task-1017675" [ 1006.828209] env[63345]: _type = "Task" [ 1006.828209] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.836226] env[63345]: DEBUG nova.compute.utils [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1006.840776] env[63345]: DEBUG oslo_vmware.api [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017675, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.840776] env[63345]: DEBUG nova.compute.manager [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 34990fa5-4a89-4430-8ea7-9e73dd41f441] Allocating IP information in the background. 
{{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1006.840939] env[63345]: DEBUG nova.network.neutron [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 34990fa5-4a89-4430-8ea7-9e73dd41f441] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1006.888784] env[63345]: DEBUG nova.policy [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fce058d27d8e4da19af436b282b37f32', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '63d7b3facae6416989f763e610cf98f7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 1006.913753] env[63345]: DEBUG oslo_vmware.api [None req-a218ce07-ac78-4a72-9854-e8fb6b118af6 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017670, 'name': CreateSnapshot_Task, 'duration_secs': 0.66073} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.914371] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a218ce07-ac78-4a72-9854-e8fb6b118af6 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Created Snapshot of the VM instance {{(pid=63345) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1006.915138] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9621422e-57a1-4934-816a-b1026b9350b9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.087885] env[63345]: DEBUG nova.network.neutron [req-5efeb3ca-2e82-415d-8e8d-7d5dbe56f600 req-f88b433c-f94c-409b-a7b9-9cedf4489d72 service nova] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Updated VIF entry in instance network info cache for port 8a3e5f64-f812-4c1b-a9e0-b8b3146a1467. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1007.088278] env[63345]: DEBUG nova.network.neutron [req-5efeb3ca-2e82-415d-8e8d-7d5dbe56f600 req-f88b433c-f94c-409b-a7b9-9cedf4489d72 service nova] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Updating instance_info_cache with network_info: [{"id": "8a3e5f64-f812-4c1b-a9e0-b8b3146a1467", "address": "fa:16:3e:55:5a:7c", "network": {"id": "b360ab0d-3deb-4632-a8d5-c1639db9e9e2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2015660260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.225", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33c28bfca4da460e8ca96dc7519204c8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f35e69ef-c2c8-4b8c-9887-33e97b242c0a", "external-id": "nsx-vlan-transportzone-969", "segmentation_id": 969, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8a3e5f64-f8", "ovs_interfaceid": "8a3e5f64-f812-4c1b-a9e0-b8b3146a1467", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1007.146198] env[63345]: DEBUG nova.network.neutron [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 34990fa5-4a89-4430-8ea7-9e73dd41f441] Successfully created port: 0b7f6d00-228a-4a62-a372-c1c21d8d6aa3 {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1007.195515] env[63345]: DEBUG oslo_vmware.api [None req-a2641803-f1fc-4e6f-9ecc-5bc8f29019e6 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017674, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.143635} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.195771] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-a2641803-f1fc-4e6f-9ecc-5bc8f29019e6 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1007.195961] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a2641803-f1fc-4e6f-9ecc-5bc8f29019e6 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1007.196215] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a2641803-f1fc-4e6f-9ecc-5bc8f29019e6 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1007.196418] env[63345]: INFO nova.compute.manager [None req-a2641803-f1fc-4e6f-9ecc-5bc8f29019e6 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1007.196670] env[63345]: DEBUG oslo.service.loopingcall [None req-a2641803-f1fc-4e6f-9ecc-5bc8f29019e6 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1007.196875] env[63345]: DEBUG nova.compute.manager [-] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 1007.196947] env[63345]: DEBUG nova.network.neutron [-] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1007.198971] env[63345]: DEBUG nova.compute.manager [None req-9cc41c77-46c3-4303-9c42-4251247bd5cb tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] Start destroying the instance on the hypervisor. 
{{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 1007.199186] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-9cc41c77-46c3-4303-9c42-4251247bd5cb tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1007.200019] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcae2579-b11b-45e9-84b1-61543cbb3fa2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.207098] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cc41c77-46c3-4303-9c42-4251247bd5cb tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1007.207361] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-de91e94e-7b6b-4aa7-b6a5-455a376a4332 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.213869] env[63345]: DEBUG oslo_vmware.api [None req-9cc41c77-46c3-4303-9c42-4251247bd5cb tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Waiting for the task: (returnval){ [ 1007.213869] env[63345]: value = "task-1017676" [ 1007.213869] env[63345]: _type = "Task" [ 1007.213869] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.222076] env[63345]: DEBUG oslo_vmware.api [None req-9cc41c77-46c3-4303-9c42-4251247bd5cb tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Task: {'id': task-1017676, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.339357] env[63345]: DEBUG nova.compute.manager [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 34990fa5-4a89-4430-8ea7-9e73dd41f441] Start building block device mappings for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 1007.341996] env[63345]: DEBUG oslo_vmware.api [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017675, 'name': PowerOnVM_Task, 'duration_secs': 0.449309} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.342518] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1007.342765] env[63345]: INFO nova.compute.manager [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a] Took 7.90 seconds to spawn the instance on the hypervisor. [ 1007.342967] env[63345]: DEBUG nova.compute.manager [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1007.346583] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75add52d-b661-4098-accd-0f55808b5c1a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.437631] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a218ce07-ac78-4a72-9854-e8fb6b118af6 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Creating linked-clone VM from snapshot {{(pid=63345) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1007.438801] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-c8fc508b-9566-4cc1-a77f-d228182745d5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.448077] env[63345]: DEBUG oslo_vmware.api [None req-a218ce07-ac78-4a72-9854-e8fb6b118af6 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 1007.448077] env[63345]: value = "task-1017677" [ 1007.448077] env[63345]: _type = "Task" [ 1007.448077] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.459841] env[63345]: DEBUG oslo_vmware.api [None req-a218ce07-ac78-4a72-9854-e8fb6b118af6 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017677, 'name': CloneVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.591884] env[63345]: DEBUG oslo_concurrency.lockutils [req-5efeb3ca-2e82-415d-8e8d-7d5dbe56f600 req-f88b433c-f94c-409b-a7b9-9cedf4489d72 service nova] Releasing lock "refresh_cache-dd624e54-bd5b-4660-88a1-9d6f36560421" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1007.591884] env[63345]: DEBUG nova.compute.manager [req-5efeb3ca-2e82-415d-8e8d-7d5dbe56f600 req-f88b433c-f94c-409b-a7b9-9cedf4489d72 service nova] [instance: 3d1e47c5-7e8c-417c-8c7c-009db666d391] Received event network-vif-deleted-4dd934da-ef10-4472-8c9e-60266db841a4 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1007.619048] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67a355f7-b0eb-44d7-a108-e66328ac1441 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.627999] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48293aae-4999-48ef-9da5-5e9f8cede0fb {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.659131] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9edc7587-b99d-47e1-9196-400248b47e21 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.667441] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1459ddb4-7acc-4b52-961e-471a49cc688b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.686019] env[63345]: DEBUG nova.compute.provider_tree [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1007.726442] env[63345]: DEBUG oslo_vmware.api [None req-9cc41c77-46c3-4303-9c42-4251247bd5cb tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Task: {'id': task-1017676, 'name': PowerOffVM_Task, 'duration_secs': 0.246657} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.726442] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cc41c77-46c3-4303-9c42-4251247bd5cb tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1007.726442] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-9cc41c77-46c3-4303-9c42-4251247bd5cb tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1007.726442] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-88a9b7b4-c33a-496d-b61b-834b49113783 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.803785] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-9cc41c77-46c3-4303-9c42-4251247bd5cb tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1007.804205] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-9cc41c77-46c3-4303-9c42-4251247bd5cb tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] Deleting contents of the VM from datastore datastore1 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1007.804279] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-9cc41c77-46c3-4303-9c42-4251247bd5cb tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Deleting the datastore file [datastore1] b3f20003-f75d-4d9f-bb4a-02d2930054a8 {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1007.804484] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ca0f59f3-a407-4cc4-b10f-c825d63cbb72 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.811513] env[63345]: DEBUG oslo_vmware.api [None req-9cc41c77-46c3-4303-9c42-4251247bd5cb tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Waiting for the task: (returnval){ [ 1007.811513] env[63345]: value = "task-1017679" [ 1007.811513] env[63345]: _type = "Task" [ 1007.811513] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.819326] env[63345]: DEBUG oslo_vmware.api [None req-9cc41c77-46c3-4303-9c42-4251247bd5cb tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Task: {'id': task-1017679, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.861924] env[63345]: INFO nova.compute.manager [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a] Took 14.09 seconds to build instance. [ 1007.958273] env[63345]: DEBUG oslo_vmware.api [None req-a218ce07-ac78-4a72-9854-e8fb6b118af6 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017677, 'name': CloneVM_Task} progress is 94%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.194044] env[63345]: DEBUG nova.scheduler.client.report [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1008.322791] env[63345]: DEBUG oslo_vmware.api [None req-9cc41c77-46c3-4303-9c42-4251247bd5cb tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Task: {'id': task-1017679, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.277251} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.323167] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-9cc41c77-46c3-4303-9c42-4251247bd5cb tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1008.323459] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-9cc41c77-46c3-4303-9c42-4251247bd5cb tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] Deleted contents of the VM from datastore datastore1 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1008.323776] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-9cc41c77-46c3-4303-9c42-4251247bd5cb tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1008.324208] env[63345]: INFO nova.compute.manager [None req-9cc41c77-46c3-4303-9c42-4251247bd5cb tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] Took 1.12 seconds to destroy the instance on the hypervisor. 
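[editor's note] The task records above (Rename_Task, PowerOnVM_Task, PowerOffVM_Task, CloneVM_Task, DeleteDatastoreFile_Task) all follow the same lifecycle that oslo_vmware's wait_for_task/_poll_task helpers log: the driver invokes a vCenter method that returns a Task reference, polls it while reporting "progress is N%", and finally logs "completed successfully" together with the measured duration_secs. The sketch below is only an illustrative, self-contained approximation of that poll-until-complete pattern; poll_task, fetch_task_state and TaskError are hypothetical stand-ins, not the oslo.vmware API.

```python
# Illustrative sketch of the poll-until-complete pattern visible in the
# wait_for_task/_poll_task log lines above. All names here (poll_task,
# fetch_task_state, TaskError) are hypothetical stand-ins, not oslo.vmware APIs.
import time


class TaskError(Exception):
    """Raised when the backend reports the task as failed."""


def poll_task(fetch_task_state, task_id, interval=0.5, timeout=300.0):
    """Poll task_id until it finishes, mirroring the logged lifecycle:
    'progress is N%' while running, then completion with duration_secs."""
    start = time.monotonic()
    while True:
        state, progress = fetch_task_state(task_id)   # e.g. ('running', 94)
        if state == 'success':
            return {'id': task_id, 'duration_secs': time.monotonic() - start}
        if state == 'error':
            raise TaskError(f"Task {task_id} failed")
        if time.monotonic() - start > timeout:
            raise TimeoutError(f"Task {task_id} did not finish in {timeout}s")
        print(f"Task {task_id} progress is {progress}%")
        time.sleep(interval)
```

In the trace above, for example, task-1017679 (DeleteDatastoreFile_Task) reports 0% once and then completes with duration_secs 0.277251.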
[ 1008.324587] env[63345]: DEBUG oslo.service.loopingcall [None req-9cc41c77-46c3-4303-9c42-4251247bd5cb tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1008.324945] env[63345]: DEBUG nova.compute.manager [-] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 1008.325107] env[63345]: DEBUG nova.network.neutron [-] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1008.348944] env[63345]: DEBUG nova.compute.manager [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 34990fa5-4a89-4430-8ea7-9e73dd41f441] Start spawning the instance on the hypervisor. {{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 1008.363609] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9dab2264-f3b9-44ce-ae56-976ffa7a42ae tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lock "0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.600s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1008.371782] env[63345]: DEBUG nova.virt.hardware [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1008.372058] env[63345]: DEBUG nova.virt.hardware [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1008.372345] env[63345]: DEBUG nova.virt.hardware [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1008.372602] env[63345]: DEBUG nova.virt.hardware [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] 
Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1008.372814] env[63345]: DEBUG nova.virt.hardware [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1008.373037] env[63345]: DEBUG nova.virt.hardware [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1008.373364] env[63345]: DEBUG nova.virt.hardware [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1008.373613] env[63345]: DEBUG nova.virt.hardware [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1008.373850] env[63345]: DEBUG nova.virt.hardware [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1008.374087] env[63345]: DEBUG nova.virt.hardware [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1008.374334] env[63345]: DEBUG nova.virt.hardware [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1008.375283] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d1740da-794e-4992-ad9d-84db2380fee8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.383747] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19520fa7-b312-4c36-ab9f-c5b5e5bf51bc {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.403178] env[63345]: DEBUG nova.compute.manager [req-05b26d2a-952b-4c7d-86d1-1019fc8afba1 req-6682ce50-9e15-4253-8697-0f7462b0c2aa service nova] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Received event network-vif-deleted-025d1e18-19a3-43ce-9db9-1590137a5544 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1008.403517] env[63345]: INFO nova.compute.manager [req-05b26d2a-952b-4c7d-86d1-1019fc8afba1 req-6682ce50-9e15-4253-8697-0f7462b0c2aa service nova] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Neutron 
deleted interface 025d1e18-19a3-43ce-9db9-1590137a5544; detaching it from the instance and deleting it from the info cache [ 1008.403755] env[63345]: DEBUG nova.network.neutron [req-05b26d2a-952b-4c7d-86d1-1019fc8afba1 req-6682ce50-9e15-4253-8697-0f7462b0c2aa service nova] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1008.454672] env[63345]: DEBUG nova.network.neutron [-] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1008.461591] env[63345]: DEBUG oslo_vmware.api [None req-a218ce07-ac78-4a72-9854-e8fb6b118af6 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017677, 'name': CloneVM_Task} progress is 94%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.704039] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.372s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1008.704039] env[63345]: DEBUG nova.compute.manager [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: a7d80763-92f0-45a9-b24b-1f973bffb376] Start building networks asynchronously for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 1008.704686] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ca306e44-3946-42a1-84bd-faab172327f6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.845s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1008.705076] env[63345]: DEBUG nova.objects.instance [None req-ca306e44-3946-42a1-84bd-faab172327f6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Lazy-loading 'resources' on Instance uuid a0eb9dae-0d27-419f-9210-eaa445e564c8 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1008.755274] env[63345]: DEBUG nova.compute.manager [req-0ea0d976-543f-4b1e-a93f-f95deb2bbee5 req-d05c893a-b9b3-41ae-ba93-b06734d021da service nova] [instance: 34990fa5-4a89-4430-8ea7-9e73dd41f441] Received event network-vif-plugged-0b7f6d00-228a-4a62-a372-c1c21d8d6aa3 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1008.755274] env[63345]: DEBUG oslo_concurrency.lockutils [req-0ea0d976-543f-4b1e-a93f-f95deb2bbee5 req-d05c893a-b9b3-41ae-ba93-b06734d021da service nova] Acquiring lock "34990fa5-4a89-4430-8ea7-9e73dd41f441-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1008.755274] env[63345]: DEBUG oslo_concurrency.lockutils [req-0ea0d976-543f-4b1e-a93f-f95deb2bbee5 req-d05c893a-b9b3-41ae-ba93-b06734d021da service nova] Lock "34990fa5-4a89-4430-8ea7-9e73dd41f441-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1008.755274] env[63345]: DEBUG oslo_concurrency.lockutils [req-0ea0d976-543f-4b1e-a93f-f95deb2bbee5 req-d05c893a-b9b3-41ae-ba93-b06734d021da service nova] Lock "34990fa5-4a89-4430-8ea7-9e73dd41f441-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1008.755274] env[63345]: DEBUG nova.compute.manager [req-0ea0d976-543f-4b1e-a93f-f95deb2bbee5 req-d05c893a-b9b3-41ae-ba93-b06734d021da service nova] [instance: 34990fa5-4a89-4430-8ea7-9e73dd41f441] No waiting events found dispatching network-vif-plugged-0b7f6d00-228a-4a62-a372-c1c21d8d6aa3 {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1008.755274] env[63345]: WARNING nova.compute.manager [req-0ea0d976-543f-4b1e-a93f-f95deb2bbee5 req-d05c893a-b9b3-41ae-ba93-b06734d021da service nova] [instance: 34990fa5-4a89-4430-8ea7-9e73dd41f441] Received unexpected event network-vif-plugged-0b7f6d00-228a-4a62-a372-c1c21d8d6aa3 for instance with vm_state building and task_state spawning. 
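[editor's note] The req-0ea0d976 records above show the external-event handshake between Neutron and nova-compute: a network-vif-plugged notification arrives, the handler takes the instance's "<uuid>-events" lock, finds no registered waiter (the instance is still in vm_state building / task_state spawning), and logs the event as unexpected before moving on. The snippet below is a minimal, hypothetical sketch of that pop-or-warn pattern; the EventRegistry class and its methods are illustrative stand-ins, not Nova's pop_instance_event implementation.

```python
# Minimal sketch of the per-instance event waiter pattern suggested by the
# pop_instance_event log lines above: an incoming external event either wakes
# a registered waiter or is reported as unexpected. Names are hypothetical.
import threading
from collections import defaultdict


class EventRegistry:
    def __init__(self):
        self._lock = threading.Lock()          # stands in for the "<uuid>-events" lock in the logs
        self._waiters = defaultdict(dict)      # instance uuid -> {event name: threading.Event}

    def prepare(self, instance_uuid, event_name):
        """Register interest in an event before triggering the external action."""
        ev = threading.Event()
        with self._lock:
            self._waiters[instance_uuid][event_name] = ev
        return ev

    def pop(self, instance_uuid, event_name):
        """Deliver an incoming event; returns True if a waiter was woken."""
        with self._lock:
            ev = self._waiters.get(instance_uuid, {}).pop(event_name, None)
        if ev is None:
            print(f"WARNING: unexpected event {event_name} for {instance_uuid}")
            return False
        ev.set()
        return True
```

That behaviour is consistent with the trace: no waiter exists yet for network-vif-plugged-0b7f6d00-228a-4a62-a372-c1c21d8d6aa3, so the handler only warns, and the port update is picked up shortly afterwards through the normal "Successfully updated port" path.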
[ 1008.794897] env[63345]: DEBUG nova.network.neutron [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 34990fa5-4a89-4430-8ea7-9e73dd41f441] Successfully updated port: 0b7f6d00-228a-4a62-a372-c1c21d8d6aa3 {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1008.915058] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6e2c9516-ac9a-43c2-996f-1b9b0938ba42 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.925879] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44eecaec-3c54-49b3-922d-e804c54a1163 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.966349] env[63345]: INFO nova.compute.manager [-] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Took 1.77 seconds to deallocate network for instance. [ 1008.967104] env[63345]: DEBUG nova.compute.manager [req-05b26d2a-952b-4c7d-86d1-1019fc8afba1 req-6682ce50-9e15-4253-8697-0f7462b0c2aa service nova] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Detach interface failed, port_id=025d1e18-19a3-43ce-9db9-1590137a5544, reason: Instance 9aa651b8-317d-4153-8c33-9df0a5d16115 could not be found. {{(pid=63345) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 1008.967593] env[63345]: DEBUG nova.compute.manager [req-05b26d2a-952b-4c7d-86d1-1019fc8afba1 req-6682ce50-9e15-4253-8697-0f7462b0c2aa service nova] [instance: 0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a] Received event network-changed-bca55223-b7b4-4623-abaf-4d4a68f5b7cc {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1008.967998] env[63345]: DEBUG nova.compute.manager [req-05b26d2a-952b-4c7d-86d1-1019fc8afba1 req-6682ce50-9e15-4253-8697-0f7462b0c2aa service nova] [instance: 0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a] Refreshing instance network info cache due to event network-changed-bca55223-b7b4-4623-abaf-4d4a68f5b7cc. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 1008.968508] env[63345]: DEBUG oslo_concurrency.lockutils [req-05b26d2a-952b-4c7d-86d1-1019fc8afba1 req-6682ce50-9e15-4253-8697-0f7462b0c2aa service nova] Acquiring lock "refresh_cache-0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1008.969353] env[63345]: DEBUG oslo_concurrency.lockutils [req-05b26d2a-952b-4c7d-86d1-1019fc8afba1 req-6682ce50-9e15-4253-8697-0f7462b0c2aa service nova] Acquired lock "refresh_cache-0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1008.969785] env[63345]: DEBUG nova.network.neutron [req-05b26d2a-952b-4c7d-86d1-1019fc8afba1 req-6682ce50-9e15-4253-8697-0f7462b0c2aa service nova] [instance: 0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a] Refreshing network info cache for port bca55223-b7b4-4623-abaf-4d4a68f5b7cc {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1008.988533] env[63345]: DEBUG oslo_vmware.api [None req-a218ce07-ac78-4a72-9854-e8fb6b118af6 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017677, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.213082] env[63345]: DEBUG nova.compute.utils [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1009.217454] env[63345]: DEBUG nova.compute.manager [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: a7d80763-92f0-45a9-b24b-1f973bffb376] Allocating IP information in the background. {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1009.217704] env[63345]: DEBUG nova.network.neutron [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: a7d80763-92f0-45a9-b24b-1f973bffb376] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1009.219529] env[63345]: DEBUG nova.network.neutron [-] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1009.263794] env[63345]: DEBUG nova.policy [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fb6730bb6292421e8f943bce2e912bef', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c535ae9067ab4e8a87e95c68af4624fb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 1009.296554] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquiring lock "refresh_cache-34990fa5-4a89-4430-8ea7-9e73dd41f441" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1009.296707] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquired lock "refresh_cache-34990fa5-4a89-4430-8ea7-9e73dd41f441" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1009.296858] env[63345]: DEBUG nova.network.neutron [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 34990fa5-4a89-4430-8ea7-9e73dd41f441] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1009.415276] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-236fa273-38c2-4a1d-ad5d-4ae09e6fbc5e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.424373] env[63345]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e7e2b0a-18eb-4be9-b5ed-1f61c2f7345f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.463268] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc3fec23-3c76-4425-bae0-a67591043389 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.471947] env[63345]: DEBUG oslo_vmware.api [None req-a218ce07-ac78-4a72-9854-e8fb6b118af6 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017677, 'name': CloneVM_Task} progress is 100%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.475088] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4693abe4-5658-4be6-bcfb-f2865c4462ec {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.489055] env[63345]: DEBUG nova.compute.provider_tree [None req-ca306e44-3946-42a1-84bd-faab172327f6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1009.493651] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a2641803-f1fc-4e6f-9ecc-5bc8f29019e6 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1009.537215] env[63345]: DEBUG nova.network.neutron [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: a7d80763-92f0-45a9-b24b-1f973bffb376] Successfully created port: c5799dfa-7c87-4bbf-b2c7-28aef5b31d52 {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1009.703123] env[63345]: DEBUG nova.network.neutron [req-05b26d2a-952b-4c7d-86d1-1019fc8afba1 req-6682ce50-9e15-4253-8697-0f7462b0c2aa service nova] [instance: 0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a] Updated VIF entry in instance network info cache for port bca55223-b7b4-4623-abaf-4d4a68f5b7cc. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1009.703123] env[63345]: DEBUG nova.network.neutron [req-05b26d2a-952b-4c7d-86d1-1019fc8afba1 req-6682ce50-9e15-4253-8697-0f7462b0c2aa service nova] [instance: 0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a] Updating instance_info_cache with network_info: [{"id": "bca55223-b7b4-4623-abaf-4d4a68f5b7cc", "address": "fa:16:3e:6d:80:f9", "network": {"id": "18b67684-3f06-4f15-be40-ba0b2769b248", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1680877425-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cb91ecf5d00e48dea9baf2122ac4fed7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "68add7d6-c025-46fa-84d3-9c589adb63e4", "external-id": "nsx-vlan-transportzone-961", "segmentation_id": 961, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbca55223-b7", "ovs_interfaceid": "bca55223-b7b4-4623-abaf-4d4a68f5b7cc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1009.718473] env[63345]: DEBUG nova.compute.manager [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: a7d80763-92f0-45a9-b24b-1f973bffb376] Start building block device mappings for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 1009.726021] env[63345]: INFO nova.compute.manager [-] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] Took 1.40 seconds to deallocate network for instance. [ 1009.838756] env[63345]: DEBUG nova.network.neutron [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 34990fa5-4a89-4430-8ea7-9e73dd41f441] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1009.970280] env[63345]: DEBUG oslo_vmware.api [None req-a218ce07-ac78-4a72-9854-e8fb6b118af6 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017677, 'name': CloneVM_Task, 'duration_secs': 2.03782} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.970679] env[63345]: INFO nova.virt.vmwareapi.vmops [None req-a218ce07-ac78-4a72-9854-e8fb6b118af6 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Created linked-clone VM from snapshot [ 1009.971474] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2c2cb2e-e65d-4f39-8e55-8c76afced77c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.979107] env[63345]: DEBUG nova.virt.vmwareapi.images [None req-a218ce07-ac78-4a72-9854-e8fb6b118af6 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Uploading image a645599a-3478-4c19-9a4f-0d1504b5322e {{(pid=63345) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 1010.009030] env[63345]: DEBUG oslo_vmware.rw_handles [None req-a218ce07-ac78-4a72-9854-e8fb6b118af6 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1010.009030] env[63345]: value = "vm-226142" [ 1010.009030] env[63345]: _type = "VirtualMachine" [ 1010.009030] env[63345]: }. {{(pid=63345) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1010.009331] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-6ec2df0c-b0bf-4cce-9ce4-469446aa2eaf {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.016419] env[63345]: DEBUG oslo_vmware.rw_handles [None req-a218ce07-ac78-4a72-9854-e8fb6b118af6 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lease: (returnval){ [ 1010.016419] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52b0bf90-87e9-e724-00b2-d6f1a7ac7b01" [ 1010.016419] env[63345]: _type = "HttpNfcLease" [ 1010.016419] env[63345]: } obtained for exporting VM: (result){ [ 1010.016419] env[63345]: value = "vm-226142" [ 1010.016419] env[63345]: _type = "VirtualMachine" [ 1010.016419] env[63345]: }. {{(pid=63345) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1010.018242] env[63345]: DEBUG oslo_vmware.api [None req-a218ce07-ac78-4a72-9854-e8fb6b118af6 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the lease: (returnval){ [ 1010.018242] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52b0bf90-87e9-e724-00b2-d6f1a7ac7b01" [ 1010.018242] env[63345]: _type = "HttpNfcLease" [ 1010.018242] env[63345]: } to be ready. {{(pid=63345) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1010.023192] env[63345]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1010.023192] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52b0bf90-87e9-e724-00b2-d6f1a7ac7b01" [ 1010.023192] env[63345]: _type = "HttpNfcLease" [ 1010.023192] env[63345]: } is initializing. 
{{(pid=63345) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1010.024195] env[63345]: DEBUG nova.scheduler.client.report [None req-ca306e44-3946-42a1-84bd-faab172327f6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Updated inventory for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with generation 134 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1010.024509] env[63345]: DEBUG nova.compute.provider_tree [None req-ca306e44-3946-42a1-84bd-faab172327f6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Updating resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 generation from 134 to 135 during operation: update_inventory {{(pid=63345) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1010.024706] env[63345]: DEBUG nova.compute.provider_tree [None req-ca306e44-3946-42a1-84bd-faab172327f6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1010.047478] env[63345]: DEBUG nova.network.neutron [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 34990fa5-4a89-4430-8ea7-9e73dd41f441] Updating instance_info_cache with network_info: [{"id": "0b7f6d00-228a-4a62-a372-c1c21d8d6aa3", "address": "fa:16:3e:9a:79:03", "network": {"id": "f05df594-fc76-4e2d-b29b-6942fee8dc99", "bridge": "br-int", "label": "tempest-ServersTestJSON-241206779-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63d7b3facae6416989f763e610cf98f7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0b7f6d00-22", "ovs_interfaceid": "0b7f6d00-228a-4a62-a372-c1c21d8d6aa3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1010.205135] env[63345]: DEBUG oslo_concurrency.lockutils [req-05b26d2a-952b-4c7d-86d1-1019fc8afba1 req-6682ce50-9e15-4253-8697-0f7462b0c2aa service nova] Releasing lock "refresh_cache-0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1010.231278] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9cc41c77-46c3-4303-9c42-4251247bd5cb tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1010.433208] env[63345]: DEBUG nova.compute.manager [req-d33b8a45-93fe-46ed-a0e5-307a4a957855 req-af82f521-6bf9-48d3-b0ee-2304073affa6 service nova] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] Received event network-vif-deleted-eaf797ae-2e07-4553-aaab-deed7e3f45a1 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1010.525195] env[63345]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1010.525195] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52b0bf90-87e9-e724-00b2-d6f1a7ac7b01" [ 1010.525195] env[63345]: _type = "HttpNfcLease" [ 1010.525195] env[63345]: } is ready. {{(pid=63345) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1010.525456] env[63345]: DEBUG oslo_vmware.rw_handles [None req-a218ce07-ac78-4a72-9854-e8fb6b118af6 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1010.525456] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52b0bf90-87e9-e724-00b2-d6f1a7ac7b01" [ 1010.525456] env[63345]: _type = "HttpNfcLease" [ 1010.525456] env[63345]: }. 
{{(pid=63345) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1010.526265] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3585f8f9-fa33-45b0-956f-3fd57ff2dc61 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.529484] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ca306e44-3946-42a1-84bd-faab172327f6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.825s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1010.534763] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ba8fb920-8da1-40e1-ac80-0fe917bfba95 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.215s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1010.535056] env[63345]: DEBUG nova.objects.instance [None req-ba8fb920-8da1-40e1-ac80-0fe917bfba95 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Lazy-loading 'resources' on Instance uuid a8321259-b3a6-4e87-b13a-b964cf0dd766 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1010.536250] env[63345]: DEBUG oslo_vmware.rw_handles [None req-a218ce07-ac78-4a72-9854-e8fb6b118af6 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52457c22-3da5-ff6c-687e-fdfc78d95388/disk-0.vmdk from lease info. {{(pid=63345) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1010.536464] env[63345]: DEBUG oslo_vmware.rw_handles [None req-a218ce07-ac78-4a72-9854-e8fb6b118af6 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52457c22-3da5-ff6c-687e-fdfc78d95388/disk-0.vmdk for reading. 
{{(pid=63345) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1010.592757] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Releasing lock "refresh_cache-34990fa5-4a89-4430-8ea7-9e73dd41f441" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1010.593094] env[63345]: DEBUG nova.compute.manager [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 34990fa5-4a89-4430-8ea7-9e73dd41f441] Instance network_info: |[{"id": "0b7f6d00-228a-4a62-a372-c1c21d8d6aa3", "address": "fa:16:3e:9a:79:03", "network": {"id": "f05df594-fc76-4e2d-b29b-6942fee8dc99", "bridge": "br-int", "label": "tempest-ServersTestJSON-241206779-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63d7b3facae6416989f763e610cf98f7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0b7f6d00-22", "ovs_interfaceid": "0b7f6d00-228a-4a62-a372-c1c21d8d6aa3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 1010.594973] env[63345]: INFO nova.scheduler.client.report [None req-ca306e44-3946-42a1-84bd-faab172327f6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Deleted allocations for instance a0eb9dae-0d27-419f-9210-eaa445e564c8 [ 1010.597031] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 34990fa5-4a89-4430-8ea7-9e73dd41f441] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9a:79:03', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7874ee7f-20c7-4bd8-a750-ed489e9acc65', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0b7f6d00-228a-4a62-a372-c1c21d8d6aa3', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1010.603450] env[63345]: DEBUG oslo.service.loopingcall [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1010.606766] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 34990fa5-4a89-4430-8ea7-9e73dd41f441] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1010.607598] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e92aacdc-b118-4561-b76b-c42bc8256831 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.634312] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1010.634312] env[63345]: value = "task-1017682" [ 1010.634312] env[63345]: _type = "Task" [ 1010.634312] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.643847] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017682, 'name': CreateVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.683057] env[63345]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-74f9146e-d02e-471a-bc84-26e71f164493 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.727907] env[63345]: DEBUG nova.compute.manager [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: a7d80763-92f0-45a9-b24b-1f973bffb376] Start spawning the instance on the hypervisor. {{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 1010.747443] env[63345]: DEBUG nova.virt.hardware [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1010.747719] env[63345]: DEBUG nova.virt.hardware [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1010.747882] env[63345]: DEBUG nova.virt.hardware [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1010.748087] env[63345]: DEBUG nova.virt.hardware [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e 
tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1010.748323] env[63345]: DEBUG nova.virt.hardware [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1010.748514] env[63345]: DEBUG nova.virt.hardware [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1010.748940] env[63345]: DEBUG nova.virt.hardware [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1010.749131] env[63345]: DEBUG nova.virt.hardware [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1010.749314] env[63345]: DEBUG nova.virt.hardware [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1010.749488] env[63345]: DEBUG nova.virt.hardware [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1010.749665] env[63345]: DEBUG nova.virt.hardware [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1010.750556] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afc61dbc-d57a-4964-a30c-5516bbc626ff {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.759634] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78947756-2cac-46f3-b261-a0e5bcdb2bc3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.778025] env[63345]: DEBUG nova.compute.manager [req-591d2187-9f00-4a84-850e-9056a82f170b req-8124209a-af68-43aa-accb-34521072331e service nova] [instance: 34990fa5-4a89-4430-8ea7-9e73dd41f441] Received event network-changed-0b7f6d00-228a-4a62-a372-c1c21d8d6aa3 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1010.778293] env[63345]: DEBUG 
nova.compute.manager [req-591d2187-9f00-4a84-850e-9056a82f170b req-8124209a-af68-43aa-accb-34521072331e service nova] [instance: 34990fa5-4a89-4430-8ea7-9e73dd41f441] Refreshing instance network info cache due to event network-changed-0b7f6d00-228a-4a62-a372-c1c21d8d6aa3. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 1010.778370] env[63345]: DEBUG oslo_concurrency.lockutils [req-591d2187-9f00-4a84-850e-9056a82f170b req-8124209a-af68-43aa-accb-34521072331e service nova] Acquiring lock "refresh_cache-34990fa5-4a89-4430-8ea7-9e73dd41f441" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1010.778516] env[63345]: DEBUG oslo_concurrency.lockutils [req-591d2187-9f00-4a84-850e-9056a82f170b req-8124209a-af68-43aa-accb-34521072331e service nova] Acquired lock "refresh_cache-34990fa5-4a89-4430-8ea7-9e73dd41f441" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1010.778678] env[63345]: DEBUG nova.network.neutron [req-591d2187-9f00-4a84-850e-9056a82f170b req-8124209a-af68-43aa-accb-34521072331e service nova] [instance: 34990fa5-4a89-4430-8ea7-9e73dd41f441] Refreshing network info cache for port 0b7f6d00-228a-4a62-a372-c1c21d8d6aa3 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1011.057242] env[63345]: DEBUG nova.network.neutron [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: a7d80763-92f0-45a9-b24b-1f973bffb376] Successfully updated port: c5799dfa-7c87-4bbf-b2c7-28aef5b31d52 {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1011.110721] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ca306e44-3946-42a1-84bd-faab172327f6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Lock "a0eb9dae-0d27-419f-9210-eaa445e564c8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.884s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1011.153872] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017682, 'name': CreateVM_Task, 'duration_secs': 0.382696} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.154986] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 34990fa5-4a89-4430-8ea7-9e73dd41f441] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1011.155705] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1011.155887] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1011.156365] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1011.159782] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c50e2de7-bf36-47a6-a8e0-eff33d76470a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.165332] env[63345]: DEBUG oslo_vmware.api [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Waiting for the task: (returnval){ [ 1011.165332] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52f4baef-0d6f-f7f5-ada3-0561d1203b93" [ 1011.165332] env[63345]: _type = "Task" [ 1011.165332] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.174186] env[63345]: DEBUG oslo_vmware.api [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52f4baef-0d6f-f7f5-ada3-0561d1203b93, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.235439] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f4e536e-8305-4fb5-87f7-5e1b30f71821 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.246904] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05e10ce1-5022-497f-8ebc-23e49597e833 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.289277] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-789b227b-2fa8-4ed7-8ab0-457d7b16ec47 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.300982] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48e7d1c4-4452-4624-92d7-dd7f15a4b70b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.318971] env[63345]: DEBUG nova.compute.provider_tree [None req-ba8fb920-8da1-40e1-ac80-0fe917bfba95 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1011.524809] env[63345]: DEBUG nova.network.neutron [req-591d2187-9f00-4a84-850e-9056a82f170b req-8124209a-af68-43aa-accb-34521072331e service nova] [instance: 34990fa5-4a89-4430-8ea7-9e73dd41f441] Updated VIF entry in instance network info cache for port 0b7f6d00-228a-4a62-a372-c1c21d8d6aa3. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1011.525294] env[63345]: DEBUG nova.network.neutron [req-591d2187-9f00-4a84-850e-9056a82f170b req-8124209a-af68-43aa-accb-34521072331e service nova] [instance: 34990fa5-4a89-4430-8ea7-9e73dd41f441] Updating instance_info_cache with network_info: [{"id": "0b7f6d00-228a-4a62-a372-c1c21d8d6aa3", "address": "fa:16:3e:9a:79:03", "network": {"id": "f05df594-fc76-4e2d-b29b-6942fee8dc99", "bridge": "br-int", "label": "tempest-ServersTestJSON-241206779-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63d7b3facae6416989f763e610cf98f7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0b7f6d00-22", "ovs_interfaceid": "0b7f6d00-228a-4a62-a372-c1c21d8d6aa3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1011.563456] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquiring lock "refresh_cache-a7d80763-92f0-45a9-b24b-1f973bffb376" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1011.563712] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquired lock "refresh_cache-a7d80763-92f0-45a9-b24b-1f973bffb376" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1011.563983] env[63345]: DEBUG nova.network.neutron [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: a7d80763-92f0-45a9-b24b-1f973bffb376] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1011.677759] env[63345]: DEBUG oslo_vmware.api [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52f4baef-0d6f-f7f5-ada3-0561d1203b93, 'name': SearchDatastore_Task, 'duration_secs': 0.013695} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.678183] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1011.678488] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 34990fa5-4a89-4430-8ea7-9e73dd41f441] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1011.678814] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1011.679034] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1011.679273] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1011.679591] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-70e7d318-03a4-48c9-bc29-d68aec103ac9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.688944] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1011.689214] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1011.689969] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f917ae86-9029-44b2-b0ab-d2e4be12e33f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.695997] env[63345]: DEBUG oslo_vmware.api [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Waiting for the task: (returnval){ [ 1011.695997] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52493dc6-bd06-9272-c184-ecef8b3938a7" [ 1011.695997] env[63345]: _type = "Task" [ 1011.695997] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.704170] env[63345]: DEBUG oslo_vmware.api [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52493dc6-bd06-9272-c184-ecef8b3938a7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.822549] env[63345]: DEBUG nova.scheduler.client.report [None req-ba8fb920-8da1-40e1-ac80-0fe917bfba95 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1011.977673] env[63345]: DEBUG oslo_concurrency.lockutils [None req-e336a3be-cd76-47bd-9cd6-2b52e78c4aab tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Acquiring lock "5e20b33c-1481-4bd3-b269-29a70cc3150d" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1011.978831] env[63345]: DEBUG oslo_concurrency.lockutils [None req-e336a3be-cd76-47bd-9cd6-2b52e78c4aab tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Lock "5e20b33c-1481-4bd3-b269-29a70cc3150d" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1012.028481] env[63345]: DEBUG oslo_concurrency.lockutils [req-591d2187-9f00-4a84-850e-9056a82f170b req-8124209a-af68-43aa-accb-34521072331e service nova] Releasing lock "refresh_cache-34990fa5-4a89-4430-8ea7-9e73dd41f441" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1012.100654] env[63345]: DEBUG nova.network.neutron [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: a7d80763-92f0-45a9-b24b-1f973bffb376] Instance 
cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1012.206749] env[63345]: DEBUG oslo_vmware.api [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52493dc6-bd06-9272-c184-ecef8b3938a7, 'name': SearchDatastore_Task, 'duration_secs': 0.017652} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.207696] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-417ccfc7-e9c5-46f0-b782-19d3a2ba7192 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.213307] env[63345]: DEBUG oslo_vmware.api [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Waiting for the task: (returnval){ [ 1012.213307] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]522463b6-28ac-ccaa-dc5d-8c3247af0e26" [ 1012.213307] env[63345]: _type = "Task" [ 1012.213307] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.222040] env[63345]: DEBUG oslo_vmware.api [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]522463b6-28ac-ccaa-dc5d-8c3247af0e26, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.234720] env[63345]: DEBUG nova.network.neutron [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: a7d80763-92f0-45a9-b24b-1f973bffb376] Updating instance_info_cache with network_info: [{"id": "c5799dfa-7c87-4bbf-b2c7-28aef5b31d52", "address": "fa:16:3e:16:e4:0a", "network": {"id": "d7581fd9-99cb-4847-b9da-a659a40e1d52", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1100696493-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c535ae9067ab4e8a87e95c68af4624fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f77ff7a1-209c-4f3f-b2a0-fd817741e739", "external-id": "nsx-vlan-transportzone-935", "segmentation_id": 935, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5799dfa-7c", "ovs_interfaceid": "c5799dfa-7c87-4bbf-b2c7-28aef5b31d52", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1012.328278] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ba8fb920-8da1-40e1-ac80-0fe917bfba95 tempest-ServerDiskConfigTestJSON-2090373809 
tempest-ServerDiskConfigTestJSON-2090373809-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.793s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1012.332429] env[63345]: DEBUG oslo_concurrency.lockutils [None req-e5a6e19e-36c5-4a15-b170-5f25384ba1a5 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.882s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1012.332817] env[63345]: DEBUG nova.objects.instance [None req-e5a6e19e-36c5-4a15-b170-5f25384ba1a5 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Lazy-loading 'resources' on Instance uuid 3d1e47c5-7e8c-417c-8c7c-009db666d391 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1012.359619] env[63345]: INFO nova.scheduler.client.report [None req-ba8fb920-8da1-40e1-ac80-0fe917bfba95 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Deleted allocations for instance a8321259-b3a6-4e87-b13a-b964cf0dd766 [ 1012.483642] env[63345]: INFO nova.compute.manager [None req-e336a3be-cd76-47bd-9cd6-2b52e78c4aab tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 5e20b33c-1481-4bd3-b269-29a70cc3150d] Detaching volume 5e6c8d6d-97f5-444a-b63d-e2544785247a [ 1012.518599] env[63345]: INFO nova.virt.block_device [None req-e336a3be-cd76-47bd-9cd6-2b52e78c4aab tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 5e20b33c-1481-4bd3-b269-29a70cc3150d] Attempting to driver detach volume 5e6c8d6d-97f5-444a-b63d-e2544785247a from mountpoint /dev/sdb [ 1012.519198] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-e336a3be-cd76-47bd-9cd6-2b52e78c4aab tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 5e20b33c-1481-4bd3-b269-29a70cc3150d] Volume detach. 
Driver type: vmdk {{(pid=63345) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1012.519575] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-e336a3be-cd76-47bd-9cd6-2b52e78c4aab tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 5e20b33c-1481-4bd3-b269-29a70cc3150d] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-226098', 'volume_id': '5e6c8d6d-97f5-444a-b63d-e2544785247a', 'name': 'volume-5e6c8d6d-97f5-444a-b63d-e2544785247a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '5e20b33c-1481-4bd3-b269-29a70cc3150d', 'attached_at': '', 'detached_at': '', 'volume_id': '5e6c8d6d-97f5-444a-b63d-e2544785247a', 'serial': '5e6c8d6d-97f5-444a-b63d-e2544785247a'} {{(pid=63345) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1012.520757] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c53beef2-add4-466a-a351-03915d916afe {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.545699] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-590f1ed3-229b-4b85-bf1f-ee2852e8a708 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.553935] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0eb05800-e2d6-4298-94a2-a576185705b8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.577064] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-559f6227-1e0e-4740-927d-6b10feb5978f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.596044] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-e336a3be-cd76-47bd-9cd6-2b52e78c4aab tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] The volume has not been displaced from its original location: [datastore2] volume-5e6c8d6d-97f5-444a-b63d-e2544785247a/volume-5e6c8d6d-97f5-444a-b63d-e2544785247a.vmdk. No consolidation needed. 
{{(pid=63345) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1012.601671] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-e336a3be-cd76-47bd-9cd6-2b52e78c4aab tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 5e20b33c-1481-4bd3-b269-29a70cc3150d] Reconfiguring VM instance instance-0000004d to detach disk 2001 {{(pid=63345) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1012.602034] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3ef8470b-1e35-4654-b15f-89265ee4441a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.623062] env[63345]: DEBUG oslo_vmware.api [None req-e336a3be-cd76-47bd-9cd6-2b52e78c4aab tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Waiting for the task: (returnval){ [ 1012.623062] env[63345]: value = "task-1017684" [ 1012.623062] env[63345]: _type = "Task" [ 1012.623062] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.634845] env[63345]: DEBUG oslo_vmware.api [None req-e336a3be-cd76-47bd-9cd6-2b52e78c4aab tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017684, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.724760] env[63345]: DEBUG oslo_vmware.api [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]522463b6-28ac-ccaa-dc5d-8c3247af0e26, 'name': SearchDatastore_Task, 'duration_secs': 0.011581} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.725040] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1012.725309] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 34990fa5-4a89-4430-8ea7-9e73dd41f441/34990fa5-4a89-4430-8ea7-9e73dd41f441.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1012.725585] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ac5743ec-5ffb-4ebd-abe7-187aa377b189 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.734311] env[63345]: DEBUG oslo_vmware.api [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Waiting for the task: (returnval){ [ 1012.734311] env[63345]: value = "task-1017685" [ 1012.734311] env[63345]: _type = "Task" [ 1012.734311] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.737860] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Releasing lock "refresh_cache-a7d80763-92f0-45a9-b24b-1f973bffb376" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1012.738176] env[63345]: DEBUG nova.compute.manager [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: a7d80763-92f0-45a9-b24b-1f973bffb376] Instance network_info: |[{"id": "c5799dfa-7c87-4bbf-b2c7-28aef5b31d52", "address": "fa:16:3e:16:e4:0a", "network": {"id": "d7581fd9-99cb-4847-b9da-a659a40e1d52", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1100696493-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c535ae9067ab4e8a87e95c68af4624fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f77ff7a1-209c-4f3f-b2a0-fd817741e739", "external-id": "nsx-vlan-transportzone-935", "segmentation_id": 935, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5799dfa-7c", "ovs_interfaceid": "c5799dfa-7c87-4bbf-b2c7-28aef5b31d52", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": 
false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 1012.738615] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: a7d80763-92f0-45a9-b24b-1f973bffb376] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:16:e4:0a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f77ff7a1-209c-4f3f-b2a0-fd817741e739', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c5799dfa-7c87-4bbf-b2c7-28aef5b31d52', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1012.746126] env[63345]: DEBUG oslo.service.loopingcall [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1012.747129] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a7d80763-92f0-45a9-b24b-1f973bffb376] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1012.747384] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-aea6b12f-8684-4e3d-9f5d-bc9f6ea0c0de {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.765433] env[63345]: DEBUG oslo_vmware.api [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017685, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.772620] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1012.772620] env[63345]: value = "task-1017686" [ 1012.772620] env[63345]: _type = "Task" [ 1012.772620] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.782236] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017686, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.804792] env[63345]: DEBUG nova.compute.manager [req-68de68cd-e34a-4eb8-ad2b-48049277aa1f req-558c3ac7-2531-4733-af64-04193b74335a service nova] [instance: a7d80763-92f0-45a9-b24b-1f973bffb376] Received event network-vif-plugged-c5799dfa-7c87-4bbf-b2c7-28aef5b31d52 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1012.805085] env[63345]: DEBUG oslo_concurrency.lockutils [req-68de68cd-e34a-4eb8-ad2b-48049277aa1f req-558c3ac7-2531-4733-af64-04193b74335a service nova] Acquiring lock "a7d80763-92f0-45a9-b24b-1f973bffb376-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1012.805351] env[63345]: DEBUG oslo_concurrency.lockutils [req-68de68cd-e34a-4eb8-ad2b-48049277aa1f req-558c3ac7-2531-4733-af64-04193b74335a service nova] Lock "a7d80763-92f0-45a9-b24b-1f973bffb376-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1012.805480] env[63345]: DEBUG oslo_concurrency.lockutils [req-68de68cd-e34a-4eb8-ad2b-48049277aa1f req-558c3ac7-2531-4733-af64-04193b74335a service nova] Lock "a7d80763-92f0-45a9-b24b-1f973bffb376-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1012.805662] env[63345]: DEBUG nova.compute.manager [req-68de68cd-e34a-4eb8-ad2b-48049277aa1f req-558c3ac7-2531-4733-af64-04193b74335a service nova] [instance: a7d80763-92f0-45a9-b24b-1f973bffb376] No waiting events found dispatching network-vif-plugged-c5799dfa-7c87-4bbf-b2c7-28aef5b31d52 {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1012.805838] env[63345]: WARNING nova.compute.manager [req-68de68cd-e34a-4eb8-ad2b-48049277aa1f req-558c3ac7-2531-4733-af64-04193b74335a service nova] [instance: a7d80763-92f0-45a9-b24b-1f973bffb376] Received unexpected event network-vif-plugged-c5799dfa-7c87-4bbf-b2c7-28aef5b31d52 for instance with vm_state building and task_state spawning. [ 1012.806012] env[63345]: DEBUG nova.compute.manager [req-68de68cd-e34a-4eb8-ad2b-48049277aa1f req-558c3ac7-2531-4733-af64-04193b74335a service nova] [instance: a7d80763-92f0-45a9-b24b-1f973bffb376] Received event network-changed-c5799dfa-7c87-4bbf-b2c7-28aef5b31d52 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1012.806365] env[63345]: DEBUG nova.compute.manager [req-68de68cd-e34a-4eb8-ad2b-48049277aa1f req-558c3ac7-2531-4733-af64-04193b74335a service nova] [instance: a7d80763-92f0-45a9-b24b-1f973bffb376] Refreshing instance network info cache due to event network-changed-c5799dfa-7c87-4bbf-b2c7-28aef5b31d52. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 1012.806566] env[63345]: DEBUG oslo_concurrency.lockutils [req-68de68cd-e34a-4eb8-ad2b-48049277aa1f req-558c3ac7-2531-4733-af64-04193b74335a service nova] Acquiring lock "refresh_cache-a7d80763-92f0-45a9-b24b-1f973bffb376" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1012.806709] env[63345]: DEBUG oslo_concurrency.lockutils [req-68de68cd-e34a-4eb8-ad2b-48049277aa1f req-558c3ac7-2531-4733-af64-04193b74335a service nova] Acquired lock "refresh_cache-a7d80763-92f0-45a9-b24b-1f973bffb376" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1012.806870] env[63345]: DEBUG nova.network.neutron [req-68de68cd-e34a-4eb8-ad2b-48049277aa1f req-558c3ac7-2531-4733-af64-04193b74335a service nova] [instance: a7d80763-92f0-45a9-b24b-1f973bffb376] Refreshing network info cache for port c5799dfa-7c87-4bbf-b2c7-28aef5b31d52 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1012.869042] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ba8fb920-8da1-40e1-ac80-0fe917bfba95 tempest-ServerDiskConfigTestJSON-2090373809 tempest-ServerDiskConfigTestJSON-2090373809-project-member] Lock "a8321259-b3a6-4e87-b13a-b964cf0dd766" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.998s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1013.039341] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80a3ef08-96cc-4ac0-b019-59660f4794ef {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.049366] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee6a3449-4fae-4c41-94ba-e408d7af7afc {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.086037] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14ba7380-7324-4015-bd97-2c68677a8994 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.095773] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3877448e-0543-4b7d-b1cf-2c14429028aa {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.112381] env[63345]: DEBUG nova.compute.provider_tree [None req-e5a6e19e-36c5-4a15-b170-5f25384ba1a5 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1013.133931] env[63345]: DEBUG oslo_vmware.api [None req-e336a3be-cd76-47bd-9cd6-2b52e78c4aab tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017684, 'name': ReconfigVM_Task, 'duration_secs': 0.390743} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.134255] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-e336a3be-cd76-47bd-9cd6-2b52e78c4aab tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 5e20b33c-1481-4bd3-b269-29a70cc3150d] Reconfigured VM instance instance-0000004d to detach disk 2001 {{(pid=63345) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1013.139073] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f08c1cd9-4a62-4f6d-a327-a73518cc4f41 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.158531] env[63345]: DEBUG oslo_vmware.api [None req-e336a3be-cd76-47bd-9cd6-2b52e78c4aab tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Waiting for the task: (returnval){ [ 1013.158531] env[63345]: value = "task-1017687" [ 1013.158531] env[63345]: _type = "Task" [ 1013.158531] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.168734] env[63345]: DEBUG oslo_vmware.api [None req-e336a3be-cd76-47bd-9cd6-2b52e78c4aab tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017687, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.247235] env[63345]: DEBUG oslo_vmware.api [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017685, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.285955] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017686, 'name': CreateVM_Task} progress is 99%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.561173] env[63345]: DEBUG nova.network.neutron [req-68de68cd-e34a-4eb8-ad2b-48049277aa1f req-558c3ac7-2531-4733-af64-04193b74335a service nova] [instance: a7d80763-92f0-45a9-b24b-1f973bffb376] Updated VIF entry in instance network info cache for port c5799dfa-7c87-4bbf-b2c7-28aef5b31d52. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1013.561583] env[63345]: DEBUG nova.network.neutron [req-68de68cd-e34a-4eb8-ad2b-48049277aa1f req-558c3ac7-2531-4733-af64-04193b74335a service nova] [instance: a7d80763-92f0-45a9-b24b-1f973bffb376] Updating instance_info_cache with network_info: [{"id": "c5799dfa-7c87-4bbf-b2c7-28aef5b31d52", "address": "fa:16:3e:16:e4:0a", "network": {"id": "d7581fd9-99cb-4847-b9da-a659a40e1d52", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1100696493-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c535ae9067ab4e8a87e95c68af4624fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f77ff7a1-209c-4f3f-b2a0-fd817741e739", "external-id": "nsx-vlan-transportzone-935", "segmentation_id": 935, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5799dfa-7c", "ovs_interfaceid": "c5799dfa-7c87-4bbf-b2c7-28aef5b31d52", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1013.615504] env[63345]: DEBUG nova.scheduler.client.report [None req-e5a6e19e-36c5-4a15-b170-5f25384ba1a5 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1013.670106] env[63345]: DEBUG oslo_vmware.api [None req-e336a3be-cd76-47bd-9cd6-2b52e78c4aab tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017687, 'name': ReconfigVM_Task, 'duration_secs': 0.273169} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.670433] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-e336a3be-cd76-47bd-9cd6-2b52e78c4aab tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 5e20b33c-1481-4bd3-b269-29a70cc3150d] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-226098', 'volume_id': '5e6c8d6d-97f5-444a-b63d-e2544785247a', 'name': 'volume-5e6c8d6d-97f5-444a-b63d-e2544785247a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '5e20b33c-1481-4bd3-b269-29a70cc3150d', 'attached_at': '', 'detached_at': '', 'volume_id': '5e6c8d6d-97f5-444a-b63d-e2544785247a', 'serial': '5e6c8d6d-97f5-444a-b63d-e2544785247a'} {{(pid=63345) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1013.747161] env[63345]: DEBUG oslo_vmware.api [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017685, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.920266} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.747443] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 34990fa5-4a89-4430-8ea7-9e73dd41f441/34990fa5-4a89-4430-8ea7-9e73dd41f441.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1013.747672] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 34990fa5-4a89-4430-8ea7-9e73dd41f441] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1013.747924] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-28a3fbd6-f5b0-451c-aae7-b2d9a36c7635 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.755582] env[63345]: DEBUG oslo_vmware.api [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Waiting for the task: (returnval){ [ 1013.755582] env[63345]: value = "task-1017688" [ 1013.755582] env[63345]: _type = "Task" [ 1013.755582] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.765746] env[63345]: DEBUG oslo_vmware.api [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017688, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.783991] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017686, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.065603] env[63345]: DEBUG oslo_concurrency.lockutils [req-68de68cd-e34a-4eb8-ad2b-48049277aa1f req-558c3ac7-2531-4733-af64-04193b74335a service nova] Releasing lock "refresh_cache-a7d80763-92f0-45a9-b24b-1f973bffb376" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1014.121079] env[63345]: DEBUG oslo_concurrency.lockutils [None req-e5a6e19e-36c5-4a15-b170-5f25384ba1a5 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.789s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1014.123530] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a2641803-f1fc-4e6f-9ecc-5bc8f29019e6 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.630s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1014.123804] env[63345]: DEBUG nova.objects.instance [None req-a2641803-f1fc-4e6f-9ecc-5bc8f29019e6 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Lazy-loading 'resources' on Instance uuid 9aa651b8-317d-4153-8c33-9df0a5d16115 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1014.146514] env[63345]: INFO nova.scheduler.client.report [None req-e5a6e19e-36c5-4a15-b170-5f25384ba1a5 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Deleted allocations for instance 3d1e47c5-7e8c-417c-8c7c-009db666d391 [ 1014.217017] env[63345]: DEBUG nova.objects.instance [None req-e336a3be-cd76-47bd-9cd6-2b52e78c4aab tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Lazy-loading 'flavor' on Instance uuid 5e20b33c-1481-4bd3-b269-29a70cc3150d {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1014.267726] env[63345]: DEBUG oslo_vmware.api [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017688, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075053} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.268219] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 34990fa5-4a89-4430-8ea7-9e73dd41f441] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1014.271014] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f70bf54-1475-4dac-8261-c7b4ebbb89c5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.298995] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 34990fa5-4a89-4430-8ea7-9e73dd41f441] Reconfiguring VM instance instance-00000065 to attach disk [datastore2] 34990fa5-4a89-4430-8ea7-9e73dd41f441/34990fa5-4a89-4430-8ea7-9e73dd41f441.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1014.299890] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-85c1904f-b94b-4a54-b1da-417b96fd4af4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.318775] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017686, 'name': CreateVM_Task} progress is 99%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.325689] env[63345]: DEBUG oslo_vmware.api [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Waiting for the task: (returnval){ [ 1014.325689] env[63345]: value = "task-1017689" [ 1014.325689] env[63345]: _type = "Task" [ 1014.325689] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.334875] env[63345]: DEBUG oslo_vmware.api [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017689, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.404477] env[63345]: DEBUG oslo_concurrency.lockutils [None req-bd6c1363-ab61-4f5d-bfa5-a74b21272a27 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquiring lock "interface-dd624e54-bd5b-4660-88a1-9d6f36560421-f9c53757-0ec3-4d99-9493-d12a48f28db3" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1014.404834] env[63345]: DEBUG oslo_concurrency.lockutils [None req-bd6c1363-ab61-4f5d-bfa5-a74b21272a27 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Lock "interface-dd624e54-bd5b-4660-88a1-9d6f36560421-f9c53757-0ec3-4d99-9493-d12a48f28db3" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1014.405294] env[63345]: DEBUG nova.objects.instance [None req-bd6c1363-ab61-4f5d-bfa5-a74b21272a27 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Lazy-loading 'flavor' on Instance uuid dd624e54-bd5b-4660-88a1-9d6f36560421 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1014.656984] env[63345]: DEBUG oslo_concurrency.lockutils [None req-e5a6e19e-36c5-4a15-b170-5f25384ba1a5 tempest-ServersNegativeTestMultiTenantJSON-818139682 tempest-ServersNegativeTestMultiTenantJSON-818139682-project-member] Lock "3d1e47c5-7e8c-417c-8c7c-009db666d391" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.732s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1014.790910] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017686, 'name': CreateVM_Task, 'duration_secs': 1.571827} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.791895] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a7d80763-92f0-45a9-b24b-1f973bffb376] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1014.793970] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1014.793970] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1014.793970] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1014.794130] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-db815fa2-52d3-4db5-a0be-8e310b5e6719 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.801583] env[63345]: DEBUG oslo_vmware.api [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for the task: (returnval){ [ 1014.801583] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52248f41-4085-e7d8-79e7-27c176df6c40" [ 1014.801583] env[63345]: _type = "Task" [ 1014.801583] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.811941] env[63345]: DEBUG oslo_vmware.api [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52248f41-4085-e7d8-79e7-27c176df6c40, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.813907] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e46410c-b2ca-46ad-b647-7e76b6940fb6 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.821782] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77238ef7-800f-48b0-8e26-82d544eb7b32 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.858744] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1e236ac-9ab6-4f54-8e30-d2428ce69b84 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.865511] env[63345]: DEBUG oslo_vmware.api [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017689, 'name': ReconfigVM_Task, 'duration_secs': 0.366173} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.866273] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 34990fa5-4a89-4430-8ea7-9e73dd41f441] Reconfigured VM instance instance-00000065 to attach disk [datastore2] 34990fa5-4a89-4430-8ea7-9e73dd41f441/34990fa5-4a89-4430-8ea7-9e73dd41f441.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1014.866974] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-986f6c79-e253-48c0-9854-0c4e4b35ea1a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.872352] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9172e97-0c2d-4c22-9c31-5cbdcdaee6b1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.877649] env[63345]: DEBUG oslo_vmware.api [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Waiting for the task: (returnval){ [ 1014.877649] env[63345]: value = "task-1017691" [ 1014.877649] env[63345]: _type = "Task" [ 1014.877649] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.891184] env[63345]: DEBUG nova.compute.provider_tree [None req-a2641803-f1fc-4e6f-9ecc-5bc8f29019e6 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1014.898148] env[63345]: DEBUG oslo_vmware.api [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017691, 'name': Rename_Task} progress is 10%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.038724] env[63345]: DEBUG nova.objects.instance [None req-bd6c1363-ab61-4f5d-bfa5-a74b21272a27 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Lazy-loading 'pci_requests' on Instance uuid dd624e54-bd5b-4660-88a1-9d6f36560421 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1015.226214] env[63345]: DEBUG oslo_concurrency.lockutils [None req-e336a3be-cd76-47bd-9cd6-2b52e78c4aab tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Lock "5e20b33c-1481-4bd3-b269-29a70cc3150d" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.247s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1015.315557] env[63345]: DEBUG oslo_vmware.api [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52248f41-4085-e7d8-79e7-27c176df6c40, 'name': SearchDatastore_Task, 'duration_secs': 0.032774} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.316170] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1015.316580] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: a7d80763-92f0-45a9-b24b-1f973bffb376] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1015.317015] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1015.318031] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1015.318031] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1015.318031] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-44f5a46a-eda6-4a52-bfdb-17ffdb246c01 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.329682] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1015.330772] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1015.331702] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f5d92dcc-71cd-41f7-9767-6e3b12ce365a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.338407] env[63345]: DEBUG oslo_vmware.api [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for the task: (returnval){ [ 1015.338407] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]5224410c-70b0-71e5-f51c-cb2ce953fb35" [ 1015.338407] env[63345]: _type = "Task" [ 1015.338407] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.347858] env[63345]: DEBUG oslo_vmware.api [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5224410c-70b0-71e5-f51c-cb2ce953fb35, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.392496] env[63345]: DEBUG oslo_vmware.api [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017691, 'name': Rename_Task, 'duration_secs': 0.190021} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.392496] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 34990fa5-4a89-4430-8ea7-9e73dd41f441] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1015.392496] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c66d97fc-fc27-4c64-a129-341a19808c12 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.406925] env[63345]: DEBUG oslo_vmware.api [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Waiting for the task: (returnval){ [ 1015.406925] env[63345]: value = "task-1017692" [ 1015.406925] env[63345]: _type = "Task" [ 1015.406925] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.420968] env[63345]: DEBUG oslo_vmware.api [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017692, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.434145] env[63345]: DEBUG nova.scheduler.client.report [None req-a2641803-f1fc-4e6f-9ecc-5bc8f29019e6 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Updated inventory for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with generation 135 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1015.434467] env[63345]: DEBUG nova.compute.provider_tree [None req-a2641803-f1fc-4e6f-9ecc-5bc8f29019e6 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Updating resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 generation from 135 to 136 during operation: update_inventory {{(pid=63345) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1015.434665] env[63345]: DEBUG nova.compute.provider_tree [None req-a2641803-f1fc-4e6f-9ecc-5bc8f29019e6 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1015.542662] env[63345]: DEBUG nova.objects.base [None req-bd6c1363-ab61-4f5d-bfa5-a74b21272a27 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=63345) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 1015.543014] env[63345]: DEBUG nova.network.neutron [None req-bd6c1363-ab61-4f5d-bfa5-a74b21272a27 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1015.637342] env[63345]: DEBUG nova.policy [None req-bd6c1363-ab61-4f5d-bfa5-a74b21272a27 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e36fd04030444217acadbbf4e4fe9be0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '33c28bfca4da460e8ca96dc7519204c8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 1015.851667] env[63345]: DEBUG 
oslo_vmware.api [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5224410c-70b0-71e5-f51c-cb2ce953fb35, 'name': SearchDatastore_Task, 'duration_secs': 0.014476} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.851855] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8e444cdc-e997-470d-9b9b-de562abea151 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.858747] env[63345]: DEBUG oslo_vmware.api [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for the task: (returnval){ [ 1015.858747] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52316274-3e03-1267-1440-24fd6f3f3708" [ 1015.858747] env[63345]: _type = "Task" [ 1015.858747] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.869201] env[63345]: DEBUG oslo_vmware.api [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52316274-3e03-1267-1440-24fd6f3f3708, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.918931] env[63345]: DEBUG oslo_vmware.api [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017692, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.932776] env[63345]: DEBUG oslo_concurrency.lockutils [None req-321f3883-f1ef-40d3-aeea-845892155507 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Acquiring lock "5e20b33c-1481-4bd3-b269-29a70cc3150d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1015.933048] env[63345]: DEBUG oslo_concurrency.lockutils [None req-321f3883-f1ef-40d3-aeea-845892155507 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Lock "5e20b33c-1481-4bd3-b269-29a70cc3150d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1015.933376] env[63345]: DEBUG oslo_concurrency.lockutils [None req-321f3883-f1ef-40d3-aeea-845892155507 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Acquiring lock "5e20b33c-1481-4bd3-b269-29a70cc3150d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1015.933568] env[63345]: DEBUG oslo_concurrency.lockutils [None req-321f3883-f1ef-40d3-aeea-845892155507 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Lock "5e20b33c-1481-4bd3-b269-29a70cc3150d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1015.933746] env[63345]: DEBUG oslo_concurrency.lockutils [None req-321f3883-f1ef-40d3-aeea-845892155507 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Lock "5e20b33c-1481-4bd3-b269-29a70cc3150d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1015.935995] env[63345]: INFO nova.compute.manager [None req-321f3883-f1ef-40d3-aeea-845892155507 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 5e20b33c-1481-4bd3-b269-29a70cc3150d] Terminating instance [ 1015.941724] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a2641803-f1fc-4e6f-9ecc-5bc8f29019e6 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.818s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1015.944064] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9cc41c77-46c3-4303-9c42-4251247bd5cb tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.713s {{(pid=63345) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1015.944403] env[63345]: DEBUG nova.objects.instance [None req-9cc41c77-46c3-4303-9c42-4251247bd5cb tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Lazy-loading 'resources' on Instance uuid b3f20003-f75d-4d9f-bb4a-02d2930054a8 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1015.968479] env[63345]: INFO nova.scheduler.client.report [None req-a2641803-f1fc-4e6f-9ecc-5bc8f29019e6 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Deleted allocations for instance 9aa651b8-317d-4153-8c33-9df0a5d16115 [ 1016.369984] env[63345]: DEBUG oslo_vmware.api [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52316274-3e03-1267-1440-24fd6f3f3708, 'name': SearchDatastore_Task, 'duration_secs': 0.01559} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.370317] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1016.370589] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] a7d80763-92f0-45a9-b24b-1f973bffb376/a7d80763-92f0-45a9-b24b-1f973bffb376.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1016.370870] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6b879542-4a8f-4a79-b54c-ebec68961865 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.380661] env[63345]: DEBUG oslo_vmware.api [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for the task: (returnval){ [ 1016.380661] env[63345]: value = "task-1017693" [ 1016.380661] env[63345]: _type = "Task" [ 1016.380661] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.389610] env[63345]: DEBUG oslo_vmware.api [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017693, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.426746] env[63345]: DEBUG oslo_vmware.api [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017692, 'name': PowerOnVM_Task, 'duration_secs': 0.57177} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.427767] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 34990fa5-4a89-4430-8ea7-9e73dd41f441] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1016.427767] env[63345]: INFO nova.compute.manager [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 34990fa5-4a89-4430-8ea7-9e73dd41f441] Took 8.08 seconds to spawn the instance on the hypervisor. [ 1016.427767] env[63345]: DEBUG nova.compute.manager [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 34990fa5-4a89-4430-8ea7-9e73dd41f441] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1016.429071] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8743147e-9c3d-401a-a10c-cd6ea0d662af {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.440845] env[63345]: DEBUG nova.compute.manager [None req-321f3883-f1ef-40d3-aeea-845892155507 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 5e20b33c-1481-4bd3-b269-29a70cc3150d] Start destroying the instance on the hypervisor. {{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 1016.441132] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-321f3883-f1ef-40d3-aeea-845892155507 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 5e20b33c-1481-4bd3-b269-29a70cc3150d] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1016.441926] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74cc9075-2a21-46a3-b712-ddcca381c866 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.452827] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-321f3883-f1ef-40d3-aeea-845892155507 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 5e20b33c-1481-4bd3-b269-29a70cc3150d] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1016.455062] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c383d643-6437-4548-b9cd-f24232c1b510 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.462628] env[63345]: DEBUG oslo_vmware.api [None req-321f3883-f1ef-40d3-aeea-845892155507 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Waiting for the task: (returnval){ [ 1016.462628] env[63345]: value = "task-1017694" [ 1016.462628] env[63345]: _type = "Task" [ 1016.462628] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.480724] env[63345]: DEBUG oslo_vmware.api [None req-321f3883-f1ef-40d3-aeea-845892155507 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017694, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.481608] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a2641803-f1fc-4e6f-9ecc-5bc8f29019e6 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Lock "9aa651b8-317d-4153-8c33-9df0a5d16115" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.913s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1016.599132] env[63345]: DEBUG oslo_concurrency.lockutils [None req-799b6c6a-3cd8-4c98-9f18-15ee76e32175 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Acquiring lock "22a11cf9-8f85-4371-98eb-25b267c9aff7" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1016.600061] env[63345]: DEBUG oslo_concurrency.lockutils [None req-799b6c6a-3cd8-4c98-9f18-15ee76e32175 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Lock "22a11cf9-8f85-4371-98eb-25b267c9aff7" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1016.628071] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f3719a7-48a7-4897-a3c5-be1832f860aa {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.639426] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b59a4a1-d05f-4603-95eb-08db191fe1b7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.672555] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-588c842b-e46d-47da-9585-4e02d4709681 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.681111] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-923d6375-0bcd-4b50-97c9-31a10f0f0809 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.696720] env[63345]: DEBUG nova.compute.provider_tree [None req-9cc41c77-46c3-4303-9c42-4251247bd5cb tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1016.892876] env[63345]: DEBUG oslo_vmware.api [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017693, 'name': 
CopyVirtualDisk_Task} progress is 51%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.947401] env[63345]: INFO nova.compute.manager [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 34990fa5-4a89-4430-8ea7-9e73dd41f441] Took 13.85 seconds to build instance. [ 1016.976612] env[63345]: DEBUG oslo_vmware.api [None req-321f3883-f1ef-40d3-aeea-845892155507 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017694, 'name': PowerOffVM_Task, 'duration_secs': 0.308261} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.977281] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-321f3883-f1ef-40d3-aeea-845892155507 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 5e20b33c-1481-4bd3-b269-29a70cc3150d] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1016.977500] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-321f3883-f1ef-40d3-aeea-845892155507 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 5e20b33c-1481-4bd3-b269-29a70cc3150d] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1016.977816] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e5eca6a5-e284-4aee-a2c8-2a9c78f2c04f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.057171] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-321f3883-f1ef-40d3-aeea-845892155507 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 5e20b33c-1481-4bd3-b269-29a70cc3150d] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1017.057605] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-321f3883-f1ef-40d3-aeea-845892155507 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 5e20b33c-1481-4bd3-b269-29a70cc3150d] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1017.057904] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-321f3883-f1ef-40d3-aeea-845892155507 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Deleting the datastore file [datastore2] 5e20b33c-1481-4bd3-b269-29a70cc3150d {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1017.058281] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e9955b6a-a449-4e30-93d6-41b504f93384 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.067293] env[63345]: DEBUG oslo_vmware.api [None req-321f3883-f1ef-40d3-aeea-845892155507 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Waiting for the task: (returnval){ [ 1017.067293] env[63345]: value = "task-1017697" [ 1017.067293] env[63345]: _type = "Task" [ 1017.067293] env[63345]: } to 
complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.076805] env[63345]: DEBUG oslo_vmware.api [None req-321f3883-f1ef-40d3-aeea-845892155507 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017697, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.106599] env[63345]: DEBUG nova.compute.utils [None req-799b6c6a-3cd8-4c98-9f18-15ee76e32175 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1017.196845] env[63345]: DEBUG nova.network.neutron [None req-bd6c1363-ab61-4f5d-bfa5-a74b21272a27 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Successfully updated port: f9c53757-0ec3-4d99-9493-d12a48f28db3 {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1017.201405] env[63345]: DEBUG nova.scheduler.client.report [None req-9cc41c77-46c3-4303-9c42-4251247bd5cb tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1017.391924] env[63345]: DEBUG oslo_vmware.api [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017693, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.638393} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.392272] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] a7d80763-92f0-45a9-b24b-1f973bffb376/a7d80763-92f0-45a9-b24b-1f973bffb376.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1017.392497] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: a7d80763-92f0-45a9-b24b-1f973bffb376] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1017.392769] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0f52333a-7f63-465a-bddd-34512a18cd95 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.401652] env[63345]: DEBUG oslo_vmware.api [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for the task: (returnval){ [ 1017.401652] env[63345]: value = "task-1017698" [ 1017.401652] env[63345]: _type = "Task" [ 1017.401652] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.411910] env[63345]: DEBUG oslo_vmware.api [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017698, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.450891] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6eebd614-143d-4847-b8c0-0dd5b5067d12 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Lock "34990fa5-4a89-4430-8ea7-9e73dd41f441" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.377s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1017.578784] env[63345]: DEBUG oslo_vmware.api [None req-321f3883-f1ef-40d3-aeea-845892155507 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017697, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.369116} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.579065] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-321f3883-f1ef-40d3-aeea-845892155507 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1017.579274] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-321f3883-f1ef-40d3-aeea-845892155507 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 5e20b33c-1481-4bd3-b269-29a70cc3150d] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1017.579464] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-321f3883-f1ef-40d3-aeea-845892155507 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 5e20b33c-1481-4bd3-b269-29a70cc3150d] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1017.579646] env[63345]: INFO nova.compute.manager [None req-321f3883-f1ef-40d3-aeea-845892155507 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 5e20b33c-1481-4bd3-b269-29a70cc3150d] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1017.579898] env[63345]: DEBUG oslo.service.loopingcall [None req-321f3883-f1ef-40d3-aeea-845892155507 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1017.580135] env[63345]: DEBUG nova.compute.manager [-] [instance: 5e20b33c-1481-4bd3-b269-29a70cc3150d] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 1017.580226] env[63345]: DEBUG nova.network.neutron [-] [instance: 5e20b33c-1481-4bd3-b269-29a70cc3150d] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1017.610570] env[63345]: DEBUG oslo_concurrency.lockutils [None req-799b6c6a-3cd8-4c98-9f18-15ee76e32175 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Lock "22a11cf9-8f85-4371-98eb-25b267c9aff7" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.011s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1017.699818] env[63345]: DEBUG oslo_concurrency.lockutils [None req-bd6c1363-ab61-4f5d-bfa5-a74b21272a27 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquiring lock "refresh_cache-dd624e54-bd5b-4660-88a1-9d6f36560421" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1017.700055] env[63345]: DEBUG oslo_concurrency.lockutils [None req-bd6c1363-ab61-4f5d-bfa5-a74b21272a27 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquired lock "refresh_cache-dd624e54-bd5b-4660-88a1-9d6f36560421" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1017.700388] env[63345]: DEBUG nova.network.neutron [None req-bd6c1363-ab61-4f5d-bfa5-a74b21272a27 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1017.704560] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9cc41c77-46c3-4303-9c42-4251247bd5cb tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.761s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1017.732568] env[63345]: INFO nova.scheduler.client.report [None req-9cc41c77-46c3-4303-9c42-4251247bd5cb tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Deleted allocations for instance b3f20003-f75d-4d9f-bb4a-02d2930054a8 [ 1017.912413] env[63345]: DEBUG oslo_vmware.api [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017698, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.399001} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.912714] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: a7d80763-92f0-45a9-b24b-1f973bffb376] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1017.913550] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7ae2745-490a-4522-b45d-3dcdeae328cb {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.936822] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: a7d80763-92f0-45a9-b24b-1f973bffb376] Reconfiguring VM instance instance-00000066 to attach disk [datastore2] a7d80763-92f0-45a9-b24b-1f973bffb376/a7d80763-92f0-45a9-b24b-1f973bffb376.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1017.937080] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3ef8be75-7f05-4a18-9680-a5ebe5e335fb {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.959764] env[63345]: DEBUG oslo_vmware.api [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for the task: (returnval){ [ 1017.959764] env[63345]: value = "task-1017699" [ 1017.959764] env[63345]: _type = "Task" [ 1017.959764] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.969509] env[63345]: DEBUG oslo_vmware.api [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017699, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.243556] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9cc41c77-46c3-4303-9c42-4251247bd5cb tempest-ImagesOneServerTestJSON-390874104 tempest-ImagesOneServerTestJSON-390874104-project-member] Lock "b3f20003-f75d-4d9f-bb4a-02d2930054a8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.554s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1018.253873] env[63345]: WARNING nova.network.neutron [None req-bd6c1363-ab61-4f5d-bfa5-a74b21272a27 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] b360ab0d-3deb-4632-a8d5-c1639db9e9e2 already exists in list: networks containing: ['b360ab0d-3deb-4632-a8d5-c1639db9e9e2']. 
ignoring it [ 1018.392482] env[63345]: DEBUG nova.compute.manager [req-3673ec82-8dff-49c9-9c9e-d4031b25056b req-05e04337-5205-47e5-b366-3fe46a0fc0ab service nova] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Received event network-vif-plugged-f9c53757-0ec3-4d99-9493-d12a48f28db3 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1018.392860] env[63345]: DEBUG oslo_concurrency.lockutils [req-3673ec82-8dff-49c9-9c9e-d4031b25056b req-05e04337-5205-47e5-b366-3fe46a0fc0ab service nova] Acquiring lock "dd624e54-bd5b-4660-88a1-9d6f36560421-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1018.393134] env[63345]: DEBUG oslo_concurrency.lockutils [req-3673ec82-8dff-49c9-9c9e-d4031b25056b req-05e04337-5205-47e5-b366-3fe46a0fc0ab service nova] Lock "dd624e54-bd5b-4660-88a1-9d6f36560421-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1018.393351] env[63345]: DEBUG oslo_concurrency.lockutils [req-3673ec82-8dff-49c9-9c9e-d4031b25056b req-05e04337-5205-47e5-b366-3fe46a0fc0ab service nova] Lock "dd624e54-bd5b-4660-88a1-9d6f36560421-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1018.393797] env[63345]: DEBUG nova.compute.manager [req-3673ec82-8dff-49c9-9c9e-d4031b25056b req-05e04337-5205-47e5-b366-3fe46a0fc0ab service nova] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] No waiting events found dispatching network-vif-plugged-f9c53757-0ec3-4d99-9493-d12a48f28db3 {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1018.393991] env[63345]: WARNING nova.compute.manager [req-3673ec82-8dff-49c9-9c9e-d4031b25056b req-05e04337-5205-47e5-b366-3fe46a0fc0ab service nova] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Received unexpected event network-vif-plugged-f9c53757-0ec3-4d99-9493-d12a48f28db3 for instance with vm_state active and task_state None. [ 1018.475907] env[63345]: DEBUG oslo_vmware.api [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017699, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.702494] env[63345]: DEBUG oslo_concurrency.lockutils [None req-799b6c6a-3cd8-4c98-9f18-15ee76e32175 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Acquiring lock "22a11cf9-8f85-4371-98eb-25b267c9aff7" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1018.702880] env[63345]: DEBUG oslo_concurrency.lockutils [None req-799b6c6a-3cd8-4c98-9f18-15ee76e32175 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Lock "22a11cf9-8f85-4371-98eb-25b267c9aff7" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1018.703157] env[63345]: INFO nova.compute.manager [None req-799b6c6a-3cd8-4c98-9f18-15ee76e32175 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Attaching volume 87389bc2-cacc-4afc-ae89-1315868453a6 to /dev/sdb [ 1018.760345] env[63345]: DEBUG nova.network.neutron [-] [instance: 5e20b33c-1481-4bd3-b269-29a70cc3150d] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1018.768064] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-148bb5bb-0e24-4dcb-b9c2-2a9195f1a469 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.773369] env[63345]: DEBUG nova.network.neutron [None req-bd6c1363-ab61-4f5d-bfa5-a74b21272a27 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Updating instance_info_cache with network_info: [{"id": "8a3e5f64-f812-4c1b-a9e0-b8b3146a1467", "address": "fa:16:3e:55:5a:7c", "network": {"id": "b360ab0d-3deb-4632-a8d5-c1639db9e9e2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2015660260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.225", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33c28bfca4da460e8ca96dc7519204c8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f35e69ef-c2c8-4b8c-9887-33e97b242c0a", "external-id": "nsx-vlan-transportzone-969", "segmentation_id": 969, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8a3e5f64-f8", "ovs_interfaceid": "8a3e5f64-f812-4c1b-a9e0-b8b3146a1467", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "f9c53757-0ec3-4d99-9493-d12a48f28db3", "address": "fa:16:3e:2f:a3:4f", "network": {"id": "b360ab0d-3deb-4632-a8d5-c1639db9e9e2", "bridge": "br-int", "label": 
"tempest-AttachInterfacesTestJSON-2015660260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33c28bfca4da460e8ca96dc7519204c8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f35e69ef-c2c8-4b8c-9887-33e97b242c0a", "external-id": "nsx-vlan-transportzone-969", "segmentation_id": 969, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf9c53757-0e", "ovs_interfaceid": "f9c53757-0ec3-4d99-9493-d12a48f28db3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1018.780590] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ff0c9f8-9d64-47df-981f-5856e4dea106 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.799248] env[63345]: DEBUG nova.virt.block_device [None req-799b6c6a-3cd8-4c98-9f18-15ee76e32175 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Updating existing volume attachment record: 65e470c5-0c39-411c-86c9-fb99df8446fb {{(pid=63345) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1018.973973] env[63345]: DEBUG oslo_vmware.api [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017699, 'name': ReconfigVM_Task, 'duration_secs': 0.542016} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.974361] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: a7d80763-92f0-45a9-b24b-1f973bffb376] Reconfigured VM instance instance-00000066 to attach disk [datastore2] a7d80763-92f0-45a9-b24b-1f973bffb376/a7d80763-92f0-45a9-b24b-1f973bffb376.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1018.975113] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-40921eac-31ec-4595-bdb1-405e33935f0c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.983532] env[63345]: DEBUG oslo_vmware.api [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for the task: (returnval){ [ 1018.983532] env[63345]: value = "task-1017702" [ 1018.983532] env[63345]: _type = "Task" [ 1018.983532] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.993625] env[63345]: DEBUG oslo_vmware.api [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017702, 'name': Rename_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.276455] env[63345]: INFO nova.compute.manager [-] [instance: 5e20b33c-1481-4bd3-b269-29a70cc3150d] Took 1.70 seconds to deallocate network for instance. [ 1019.277133] env[63345]: DEBUG oslo_concurrency.lockutils [None req-bd6c1363-ab61-4f5d-bfa5-a74b21272a27 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Releasing lock "refresh_cache-dd624e54-bd5b-4660-88a1-9d6f36560421" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1019.277844] env[63345]: DEBUG oslo_concurrency.lockutils [None req-bd6c1363-ab61-4f5d-bfa5-a74b21272a27 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquiring lock "dd624e54-bd5b-4660-88a1-9d6f36560421" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1019.278030] env[63345]: DEBUG oslo_concurrency.lockutils [None req-bd6c1363-ab61-4f5d-bfa5-a74b21272a27 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquired lock "dd624e54-bd5b-4660-88a1-9d6f36560421" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1019.289778] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f44d62e4-8333-4f1e-9495-af1a998be7c1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.312131] env[63345]: DEBUG nova.virt.hardware [None req-bd6c1363-ab61-4f5d-bfa5-a74b21272a27 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1019.312131] env[63345]: DEBUG nova.virt.hardware [None req-bd6c1363-ab61-4f5d-bfa5-a74b21272a27 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1019.312131] env[63345]: DEBUG nova.virt.hardware [None req-bd6c1363-ab61-4f5d-bfa5-a74b21272a27 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1019.312131] env[63345]: DEBUG 
nova.virt.hardware [None req-bd6c1363-ab61-4f5d-bfa5-a74b21272a27 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1019.312131] env[63345]: DEBUG nova.virt.hardware [None req-bd6c1363-ab61-4f5d-bfa5-a74b21272a27 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1019.312404] env[63345]: DEBUG nova.virt.hardware [None req-bd6c1363-ab61-4f5d-bfa5-a74b21272a27 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1019.312583] env[63345]: DEBUG nova.virt.hardware [None req-bd6c1363-ab61-4f5d-bfa5-a74b21272a27 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1019.312756] env[63345]: DEBUG nova.virt.hardware [None req-bd6c1363-ab61-4f5d-bfa5-a74b21272a27 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1019.313268] env[63345]: DEBUG nova.virt.hardware [None req-bd6c1363-ab61-4f5d-bfa5-a74b21272a27 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1019.313540] env[63345]: DEBUG nova.virt.hardware [None req-bd6c1363-ab61-4f5d-bfa5-a74b21272a27 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1019.314461] env[63345]: DEBUG nova.virt.hardware [None req-bd6c1363-ab61-4f5d-bfa5-a74b21272a27 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1019.321953] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-bd6c1363-ab61-4f5d-bfa5-a74b21272a27 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Reconfiguring VM to attach interface {{(pid=63345) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1929}} [ 1019.322817] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d53b157f-7bf6-4b6b-8089-e9b5ef6e1365 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.342877] env[63345]: DEBUG oslo_vmware.api [None req-bd6c1363-ab61-4f5d-bfa5-a74b21272a27 tempest-AttachInterfacesTestJSON-1256861117 
tempest-AttachInterfacesTestJSON-1256861117-project-member] Waiting for the task: (returnval){ [ 1019.342877] env[63345]: value = "task-1017705" [ 1019.342877] env[63345]: _type = "Task" [ 1019.342877] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.352655] env[63345]: DEBUG oslo_vmware.api [None req-bd6c1363-ab61-4f5d-bfa5-a74b21272a27 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017705, 'name': ReconfigVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.496611] env[63345]: DEBUG oslo_vmware.api [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017702, 'name': Rename_Task, 'duration_secs': 0.222697} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.497120] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: a7d80763-92f0-45a9-b24b-1f973bffb376] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1019.497406] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ceaff302-26b3-4d28-8f98-4ed22377ebc2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.504291] env[63345]: DEBUG oslo_vmware.api [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for the task: (returnval){ [ 1019.504291] env[63345]: value = "task-1017706" [ 1019.504291] env[63345]: _type = "Task" [ 1019.504291] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.514701] env[63345]: DEBUG oslo_vmware.api [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017706, 'name': PowerOnVM_Task} progress is 0%. 
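The nova.virt.hardware records a few lines above take a 1-vCPU flavor with no explicit limits ("Flavor limits 0:0:0", maximum 65536:65536:65536) and end with a single possible topology of 1 socket, 1 core, 1 thread. A rough illustration of that enumeration (not Nova's _get_possible_cpu_topologies; just the underlying idea that every sockets*cores*threads factorization of the vCPU count within the limits is a candidate):

    import itertools

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        """Enumerate (sockets, cores, threads) triples whose product equals vcpus."""
        found = []
        for sockets, cores, threads in itertools.product(
                range(1, min(vcpus, max_sockets) + 1),
                range(1, min(vcpus, max_cores) + 1),
                range(1, min(vcpus, max_threads) + 1)):
            if sockets * cores * threads == vcpus:
                found.append((sockets, cores, threads))
        return found

    print(possible_topologies(1))   # [(1, 1, 1)] -- matches "Got 1 possible topologies"
    print(possible_topologies(4))   # several candidates, e.g. (1, 2, 2), (2, 2, 1), (4, 1, 1)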
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.790061] env[63345]: DEBUG oslo_concurrency.lockutils [None req-321f3883-f1ef-40d3-aeea-845892155507 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1019.791121] env[63345]: DEBUG oslo_concurrency.lockutils [None req-321f3883-f1ef-40d3-aeea-845892155507 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1019.791498] env[63345]: DEBUG nova.objects.instance [None req-321f3883-f1ef-40d3-aeea-845892155507 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Lazy-loading 'resources' on Instance uuid 5e20b33c-1481-4bd3-b269-29a70cc3150d {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1019.860317] env[63345]: DEBUG oslo_vmware.api [None req-bd6c1363-ab61-4f5d-bfa5-a74b21272a27 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017705, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.021072] env[63345]: DEBUG oslo_vmware.api [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017706, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.364408] env[63345]: DEBUG oslo_vmware.api [None req-bd6c1363-ab61-4f5d-bfa5-a74b21272a27 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017705, 'name': ReconfigVM_Task} progress is 99%. 
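The repeated "Waiting for the task: ... to complete" / "progress is N%" / "completed successfully" sequences are oslo.vmware polling a vCenter task until it reaches a terminal state. A simplified, self-contained sketch of that polling pattern (FakeTask and wait_for_task here are stand-ins written for this example; the real logic lives in oslo_vmware.api):

    import time

    class FakeTask:
        """Stand-in for a vCenter task; reports increasing progress, then success."""
        def __init__(self, steps):
            self._progress = iter(steps)

        def poll(self):
            try:
                return ("running", next(self._progress))
            except StopIteration:
                return ("success", 100)

    def wait_for_task(task, interval=0.5):
        """Poll until the task finishes, mirroring the progress lines in the log."""
        while True:
            state, progress = task.poll()
            print(f"Task progress is {progress}%.")
            if state == "success":
                print("Task completed successfully.")
                return
            if state == "error":
                raise RuntimeError("task failed")
            time.sleep(interval)

    wait_for_task(FakeTask([0, 5, 99]), interval=0.01)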
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.411565] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6ade8e6f-c67f-4d34-9fb4-5f80464b7353 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquiring lock "34990fa5-4a89-4430-8ea7-9e73dd41f441" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1020.412581] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6ade8e6f-c67f-4d34-9fb4-5f80464b7353 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Lock "34990fa5-4a89-4430-8ea7-9e73dd41f441" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1020.412581] env[63345]: DEBUG nova.compute.manager [None req-6ade8e6f-c67f-4d34-9fb4-5f80464b7353 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 34990fa5-4a89-4430-8ea7-9e73dd41f441] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1020.413381] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd16f9bd-9fcb-4dcc-a629-e8903a9b71e9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.191267] env[63345]: DEBUG nova.compute.manager [None req-6ade8e6f-c67f-4d34-9fb4-5f80464b7353 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 34990fa5-4a89-4430-8ea7-9e73dd41f441] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63345) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3403}} [ 1021.191267] env[63345]: DEBUG nova.objects.instance [None req-6ade8e6f-c67f-4d34-9fb4-5f80464b7353 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Lazy-loading 'flavor' on Instance uuid 34990fa5-4a89-4430-8ea7-9e73dd41f441 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1021.194641] env[63345]: DEBUG nova.compute.manager [req-dcad6408-beb9-4bfa-8749-2842196ccf12 req-def690fd-e863-4f1e-857d-6dca046f4a67 service nova] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Received event network-changed-f9c53757-0ec3-4d99-9493-d12a48f28db3 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1021.194868] env[63345]: DEBUG nova.compute.manager [req-dcad6408-beb9-4bfa-8749-2842196ccf12 req-def690fd-e863-4f1e-857d-6dca046f4a67 service nova] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Refreshing instance network info cache due to event network-changed-f9c53757-0ec3-4d99-9493-d12a48f28db3. 
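The external-event records here ("Received event network-changed-...", and earlier "No waiting events found dispatching network-vif-plugged-..." followed by "Received unexpected event") reflect a pattern of parking a waiter per (instance, event) and popping it when Neutron reports the event; if nothing is waiting, the event is logged as unexpected. A stripped-down version of that idea using threading.Event (the EventWaiters class is illustrative, not Nova's InstanceEvents):

    import threading

    class EventWaiters:
        """Tiny per-instance event registry: prepare a waiter, pop it on arrival."""
        def __init__(self):
            self._lock = threading.Lock()
            self._waiters = {}   # (instance_uuid, event_name) -> threading.Event

        def prepare(self, instance_uuid, event_name):
            with self._lock:
                ev = threading.Event()
                self._waiters[(instance_uuid, event_name)] = ev
                return ev

        def pop(self, instance_uuid, event_name):
            """Called when an external event arrives; returns True if someone was waiting."""
            with self._lock:
                ev = self._waiters.pop((instance_uuid, event_name), None)
            if ev is None:
                print(f"Received unexpected event {event_name} for {instance_uuid}")
                return False
            ev.set()
            return True

    waiters = EventWaiters()
    # No waiter registered, so this mirrors the "unexpected event" warning:
    waiters.pop("dd624e54-bd5b-4660-88a1-9d6f36560421",
                "network-vif-plugged-f9c53757-0ec3-4d99-9493-d12a48f28db3")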
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 1021.196743] env[63345]: DEBUG oslo_concurrency.lockutils [req-dcad6408-beb9-4bfa-8749-2842196ccf12 req-def690fd-e863-4f1e-857d-6dca046f4a67 service nova] Acquiring lock "refresh_cache-dd624e54-bd5b-4660-88a1-9d6f36560421" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1021.196743] env[63345]: DEBUG oslo_concurrency.lockutils [req-dcad6408-beb9-4bfa-8749-2842196ccf12 req-def690fd-e863-4f1e-857d-6dca046f4a67 service nova] Acquired lock "refresh_cache-dd624e54-bd5b-4660-88a1-9d6f36560421" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1021.196743] env[63345]: DEBUG nova.network.neutron [req-dcad6408-beb9-4bfa-8749-2842196ccf12 req-def690fd-e863-4f1e-857d-6dca046f4a67 service nova] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Refreshing network info cache for port f9c53757-0ec3-4d99-9493-d12a48f28db3 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1021.208531] env[63345]: DEBUG oslo_vmware.api [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017706, 'name': PowerOnVM_Task, 'duration_secs': 0.858657} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.212070] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: a7d80763-92f0-45a9-b24b-1f973bffb376] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1021.212376] env[63345]: INFO nova.compute.manager [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: a7d80763-92f0-45a9-b24b-1f973bffb376] Took 10.48 seconds to spawn the instance on the hypervisor. [ 1021.212597] env[63345]: DEBUG nova.compute.manager [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: a7d80763-92f0-45a9-b24b-1f973bffb376] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1021.213626] env[63345]: DEBUG oslo_vmware.api [None req-bd6c1363-ab61-4f5d-bfa5-a74b21272a27 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017705, 'name': ReconfigVM_Task, 'duration_secs': 1.027458} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.214644] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eef0c63-9849-4e1c-b96c-931d272b814d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.217786] env[63345]: DEBUG oslo_concurrency.lockutils [None req-bd6c1363-ab61-4f5d-bfa5-a74b21272a27 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Releasing lock "dd624e54-bd5b-4660-88a1-9d6f36560421" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1021.217991] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-bd6c1363-ab61-4f5d-bfa5-a74b21272a27 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Reconfigured VM to attach interface {{(pid=63345) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1943}} [ 1021.237847] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3afc6d1d-dd47-4d55-8723-548d4490e8f1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.246462] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ce76f77-1a0a-443b-9514-642efafb6887 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.280033] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b857697-59f6-4c00-85f1-4a7484f33bdc {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.287019] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6785d6fb-9419-4c64-af6b-5ba317836c76 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.292827] env[63345]: DEBUG oslo_vmware.rw_handles [None req-a218ce07-ac78-4a72-9854-e8fb6b118af6 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52457c22-3da5-ff6c-687e-fdfc78d95388/disk-0.vmdk. 
{{(pid=63345) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1021.293573] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66196968-57d9-422c-b086-fe1d9197c63c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.307133] env[63345]: DEBUG nova.compute.provider_tree [None req-321f3883-f1ef-40d3-aeea-845892155507 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1021.309618] env[63345]: DEBUG oslo_vmware.rw_handles [None req-a218ce07-ac78-4a72-9854-e8fb6b118af6 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52457c22-3da5-ff6c-687e-fdfc78d95388/disk-0.vmdk is in state: ready. {{(pid=63345) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1021.309825] env[63345]: ERROR oslo_vmware.rw_handles [None req-a218ce07-ac78-4a72-9854-e8fb6b118af6 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52457c22-3da5-ff6c-687e-fdfc78d95388/disk-0.vmdk due to incomplete transfer. [ 1021.310225] env[63345]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-8d55963d-0018-425a-be5f-52f8c3773e20 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.317450] env[63345]: DEBUG oslo_vmware.rw_handles [None req-a218ce07-ac78-4a72-9854-e8fb6b118af6 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52457c22-3da5-ff6c-687e-fdfc78d95388/disk-0.vmdk. 
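The rw_handles records above close the VMDK read handle while the NFC lease is still "ready", so the lease is aborted because the transfer never completed. The general shape of that cleanup is a complete-or-abort decision in a finally block; the Lease class and transfer_disk helper below are purely hypothetical stand-ins for the oslo.vmware objects, sketched only to show the pattern:

    class Lease:
        """Hypothetical lease object; the real one is managed by oslo.vmware."""
        def complete(self):
            print("Lease completed.")
        def abort(self):
            print("Aborting lease due to incomplete transfer.")

    def transfer_disk(lease, chunks, expected_bytes):
        transferred = 0
        try:
            for chunk in chunks:
                transferred += len(chunk)
        finally:
            # Only mark the lease complete if every byte made it across.
            if transferred >= expected_bytes:
                lease.complete()
            else:
                lease.abort()

    transfer_disk(Lease(), chunks=[b"x" * 10], expected_bytes=100)   # takes the abort path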
{{(pid=63345) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1021.317661] env[63345]: DEBUG nova.virt.vmwareapi.images [None req-a218ce07-ac78-4a72-9854-e8fb6b118af6 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Uploaded image a645599a-3478-4c19-9a4f-0d1504b5322e to the Glance image server {{(pid=63345) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:473}} [ 1021.320014] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-a218ce07-ac78-4a72-9854-e8fb6b118af6 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Destroying the VM {{(pid=63345) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 1021.320904] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-52d5f3cb-ec7f-4f02-91a7-f615a4211193 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.327703] env[63345]: DEBUG oslo_vmware.api [None req-a218ce07-ac78-4a72-9854-e8fb6b118af6 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 1021.327703] env[63345]: value = "task-1017708" [ 1021.327703] env[63345]: _type = "Task" [ 1021.327703] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.337555] env[63345]: DEBUG oslo_vmware.api [None req-a218ce07-ac78-4a72-9854-e8fb6b118af6 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017708, 'name': Destroy_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.475760] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Acquiring lock "95738bee-d291-4f27-aeff-9445939bb3fa" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1021.476050] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Lock "95738bee-d291-4f27-aeff-9445939bb3fa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1021.724685] env[63345]: DEBUG oslo_concurrency.lockutils [None req-bd6c1363-ab61-4f5d-bfa5-a74b21272a27 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Lock "interface-dd624e54-bd5b-4660-88a1-9d6f36560421-f9c53757-0ec3-4d99-9493-d12a48f28db3" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.320s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1021.740697] env[63345]: INFO nova.compute.manager [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: a7d80763-92f0-45a9-b24b-1f973bffb376] Took 18.51 seconds to build instance. [ 1021.811928] env[63345]: DEBUG nova.scheduler.client.report [None req-321f3883-f1ef-40d3-aeea-845892155507 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1021.839651] env[63345]: DEBUG oslo_vmware.api [None req-a218ce07-ac78-4a72-9854-e8fb6b118af6 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017708, 'name': Destroy_Task} progress is 33%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.972344] env[63345]: DEBUG nova.network.neutron [req-dcad6408-beb9-4bfa-8749-2842196ccf12 req-def690fd-e863-4f1e-857d-6dca046f4a67 service nova] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Updated VIF entry in instance network info cache for port f9c53757-0ec3-4d99-9493-d12a48f28db3. 
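The inventory payload reported for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 gives, per resource class, a total, a reserved amount and an allocation_ratio; the capacity the scheduler can place against is (total - reserved) * allocation_ratio, e.g. 48 VCPU * 4.0 = 192 schedulable VCPU and (196590 - 512) MB of RAM at ratio 1.0. A small sketch of that arithmetic using the exact values from the log (the helper name is invented for this example):

    inventory = {
        "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0},
    }

    def schedulable_capacity(inv):
        """Effective capacity per resource class: (total - reserved) * allocation_ratio."""
        return {rc: (v["total"] - v["reserved"]) * v["allocation_ratio"]
                for rc, v in inv.items()}

    print(schedulable_capacity(inventory))
    # {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}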
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1021.972821] env[63345]: DEBUG nova.network.neutron [req-dcad6408-beb9-4bfa-8749-2842196ccf12 req-def690fd-e863-4f1e-857d-6dca046f4a67 service nova] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Updating instance_info_cache with network_info: [{"id": "8a3e5f64-f812-4c1b-a9e0-b8b3146a1467", "address": "fa:16:3e:55:5a:7c", "network": {"id": "b360ab0d-3deb-4632-a8d5-c1639db9e9e2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2015660260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.225", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33c28bfca4da460e8ca96dc7519204c8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f35e69ef-c2c8-4b8c-9887-33e97b242c0a", "external-id": "nsx-vlan-transportzone-969", "segmentation_id": 969, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8a3e5f64-f8", "ovs_interfaceid": "8a3e5f64-f812-4c1b-a9e0-b8b3146a1467", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "f9c53757-0ec3-4d99-9493-d12a48f28db3", "address": "fa:16:3e:2f:a3:4f", "network": {"id": "b360ab0d-3deb-4632-a8d5-c1639db9e9e2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2015660260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33c28bfca4da460e8ca96dc7519204c8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f35e69ef-c2c8-4b8c-9887-33e97b242c0a", "external-id": "nsx-vlan-transportzone-969", "segmentation_id": 969, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf9c53757-0e", "ovs_interfaceid": "f9c53757-0ec3-4d99-9493-d12a48f28db3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1021.979909] env[63345]: DEBUG nova.compute.manager [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Starting instance... 
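The instance_info_cache entries above are a list of VIF dicts, each carrying a port id, MAC address, and a nested network/subnet structure that holds the fixed and floating IPs. A short helper fed with a trimmed-down copy of the cached data (the summarize_vifs function is written for this example) makes the shape easier to read:

    # Trimmed-down copy of the cached VIF structure logged above.
    network_info = [
        {"id": "8a3e5f64-f812-4c1b-a9e0-b8b3146a1467", "address": "fa:16:3e:55:5a:7c",
         "network": {"subnets": [{"cidr": "192.168.128.0/28",
                                  "ips": [{"address": "192.168.128.3",
                                           "floating_ips": [{"address": "10.180.180.225"}]}]}]}},
        {"id": "f9c53757-0ec3-4d99-9493-d12a48f28db3", "address": "fa:16:3e:2f:a3:4f",
         "network": {"subnets": [{"cidr": "192.168.128.0/28",
                                  "ips": [{"address": "192.168.128.5", "floating_ips": []}]}]}},
    ]

    def summarize_vifs(info):
        for vif in info:
            for subnet in vif["network"]["subnets"]:
                for ip in subnet["ips"]:
                    floats = [f["address"] for f in ip.get("floating_ips", [])]
                    print(f'{vif["id"][:8]}  {vif["address"]}  fixed={ip["address"]}  floating={floats}')

    summarize_vifs(network_info)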
{{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 1022.211668] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ade8e6f-c67f-4d34-9fb4-5f80464b7353 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 34990fa5-4a89-4430-8ea7-9e73dd41f441] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1022.211668] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d8df1736-794a-43ba-82e3-339f636f4090 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.219374] env[63345]: DEBUG oslo_vmware.api [None req-6ade8e6f-c67f-4d34-9fb4-5f80464b7353 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Waiting for the task: (returnval){ [ 1022.219374] env[63345]: value = "task-1017709" [ 1022.219374] env[63345]: _type = "Task" [ 1022.219374] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.229544] env[63345]: DEBUG oslo_vmware.api [None req-6ade8e6f-c67f-4d34-9fb4-5f80464b7353 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017709, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.242495] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4326cf0f-9ab7-4de4-9a6d-011a701e526e tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lock "a7d80763-92f0-45a9-b24b-1f973bffb376" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.018s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1022.280592] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Acquiring lock "0f3f59b6-e7bc-4657-af5f-eec18efc3666" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1022.280842] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Lock "0f3f59b6-e7bc-4657-af5f-eec18efc3666" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1022.318048] env[63345]: DEBUG oslo_concurrency.lockutils [None req-321f3883-f1ef-40d3-aeea-845892155507 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.526s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1022.339058] env[63345]: DEBUG oslo_vmware.api [None req-a218ce07-ac78-4a72-9854-e8fb6b118af6 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': 
task-1017708, 'name': Destroy_Task, 'duration_secs': 0.603389} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.340094] env[63345]: INFO nova.scheduler.client.report [None req-321f3883-f1ef-40d3-aeea-845892155507 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Deleted allocations for instance 5e20b33c-1481-4bd3-b269-29a70cc3150d [ 1022.341101] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-a218ce07-ac78-4a72-9854-e8fb6b118af6 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Destroyed the VM [ 1022.341356] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a218ce07-ac78-4a72-9854-e8fb6b118af6 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Deleting Snapshot of the VM instance {{(pid=63345) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1022.343677] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-72abc236-af1a-4478-8110-0e74fc9ffc34 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.350682] env[63345]: DEBUG oslo_vmware.api [None req-a218ce07-ac78-4a72-9854-e8fb6b118af6 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 1022.350682] env[63345]: value = "task-1017710" [ 1022.350682] env[63345]: _type = "Task" [ 1022.350682] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.359370] env[63345]: DEBUG oslo_vmware.api [None req-a218ce07-ac78-4a72-9854-e8fb6b118af6 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017710, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.477499] env[63345]: DEBUG oslo_concurrency.lockutils [req-dcad6408-beb9-4bfa-8749-2842196ccf12 req-def690fd-e863-4f1e-857d-6dca046f4a67 service nova] Releasing lock "refresh_cache-dd624e54-bd5b-4660-88a1-9d6f36560421" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1022.477814] env[63345]: DEBUG nova.compute.manager [req-dcad6408-beb9-4bfa-8749-2842196ccf12 req-def690fd-e863-4f1e-857d-6dca046f4a67 service nova] [instance: 5e20b33c-1481-4bd3-b269-29a70cc3150d] Received event network-vif-deleted-77f9ccf5-22c3-4c4e-8a43-15543133f465 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1022.508564] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1022.509032] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1022.511083] env[63345]: INFO nova.compute.claims [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1022.602963] env[63345]: DEBUG oslo_concurrency.lockutils [None req-54db4300-37a3-4464-bcaf-1099a9ac4537 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Acquiring lock "da3408a0-cce7-4252-be47-097f081d83c1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1022.603560] env[63345]: DEBUG oslo_concurrency.lockutils [None req-54db4300-37a3-4464-bcaf-1099a9ac4537 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Lock "da3408a0-cce7-4252-be47-097f081d83c1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1022.731784] env[63345]: DEBUG oslo_vmware.api [None req-6ade8e6f-c67f-4d34-9fb4-5f80464b7353 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017709, 'name': PowerOffVM_Task, 'duration_secs': 0.360409} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.732687] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ade8e6f-c67f-4d34-9fb4-5f80464b7353 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 34990fa5-4a89-4430-8ea7-9e73dd41f441] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1022.732997] env[63345]: DEBUG nova.compute.manager [None req-6ade8e6f-c67f-4d34-9fb4-5f80464b7353 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 34990fa5-4a89-4430-8ea7-9e73dd41f441] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1022.733886] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0970bde-82d1-422d-a188-1dfb1b406c27 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.773718] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f9dc7741-3952-43fb-bcf5-4e551890e903 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquiring lock "a7d80763-92f0-45a9-b24b-1f973bffb376" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1022.774063] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f9dc7741-3952-43fb-bcf5-4e551890e903 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lock "a7d80763-92f0-45a9-b24b-1f973bffb376" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1022.774328] env[63345]: DEBUG nova.compute.manager [None req-f9dc7741-3952-43fb-bcf5-4e551890e903 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: a7d80763-92f0-45a9-b24b-1f973bffb376] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1022.775423] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a465da4-72c1-474a-b119-21d92a49ae0f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.785340] env[63345]: DEBUG nova.compute.manager [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: 0f3f59b6-e7bc-4657-af5f-eec18efc3666] Starting instance... 
{{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 1022.789298] env[63345]: DEBUG nova.compute.manager [None req-f9dc7741-3952-43fb-bcf5-4e551890e903 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: a7d80763-92f0-45a9-b24b-1f973bffb376] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63345) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3403}} [ 1022.790333] env[63345]: DEBUG nova.objects.instance [None req-f9dc7741-3952-43fb-bcf5-4e551890e903 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lazy-loading 'flavor' on Instance uuid a7d80763-92f0-45a9-b24b-1f973bffb376 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1022.849469] env[63345]: DEBUG oslo_concurrency.lockutils [None req-321f3883-f1ef-40d3-aeea-845892155507 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Lock "5e20b33c-1481-4bd3-b269-29a70cc3150d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.916s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1022.864743] env[63345]: DEBUG oslo_vmware.api [None req-a218ce07-ac78-4a72-9854-e8fb6b118af6 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017710, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.106357] env[63345]: DEBUG nova.compute.manager [None req-54db4300-37a3-4464-bcaf-1099a9ac4537 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: da3408a0-cce7-4252-be47-097f081d83c1] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 1023.248257] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6ade8e6f-c67f-4d34-9fb4-5f80464b7353 tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Lock "34990fa5-4a89-4430-8ea7-9e73dd41f441" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.836s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1023.309929] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1023.362590] env[63345]: DEBUG oslo_vmware.api [None req-a218ce07-ac78-4a72-9854-e8fb6b118af6 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017710, 'name': RemoveSnapshot_Task, 'duration_secs': 0.575879} completed successfully. 
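"Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1" compares the power state stored in the database with the one read back from the hypervisor. The integer codes come from nova.compute.power_state; the mapping below is recalled from that module and should be verified against the Nova tree in use:

    # Assumed mapping from nova/compute/power_state.py -- verify against your Nova version.
    POWER_STATES = {
        0: "NOSTATE",
        1: "RUNNING",     # the "power_state: 1" seen in the log
        3: "PAUSED",
        4: "SHUTDOWN",
        6: "CRASHED",
        7: "SUSPENDED",
    }

    def describe(db_state, vm_state):
        return (f"DB says {POWER_STATES.get(db_state, db_state)}, "
                f"hypervisor says {POWER_STATES.get(vm_state, vm_state)}")

    print(describe(1, 1))   # DB says RUNNING, hypervisor says RUNNING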
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.362927] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a218ce07-ac78-4a72-9854-e8fb6b118af6 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Deleted Snapshot of the VM instance {{(pid=63345) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1023.363224] env[63345]: INFO nova.compute.manager [None req-a218ce07-ac78-4a72-9854-e8fb6b118af6 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Took 18.00 seconds to snapshot the instance on the hypervisor. [ 1023.368111] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f2f37c9e-38cd-4fb3-8e8f-ddc7c55887a7 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquiring lock "interface-dd624e54-bd5b-4660-88a1-9d6f36560421-f9c53757-0ec3-4d99-9493-d12a48f28db3" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1023.368365] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f2f37c9e-38cd-4fb3-8e8f-ddc7c55887a7 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Lock "interface-dd624e54-bd5b-4660-88a1-9d6f36560421-f9c53757-0ec3-4d99-9493-d12a48f28db3" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1023.370370] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-799b6c6a-3cd8-4c98-9f18-15ee76e32175 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Volume attach. 
Driver type: vmdk {{(pid=63345) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1023.370605] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-799b6c6a-3cd8-4c98-9f18-15ee76e32175 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-226148', 'volume_id': '87389bc2-cacc-4afc-ae89-1315868453a6', 'name': 'volume-87389bc2-cacc-4afc-ae89-1315868453a6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '22a11cf9-8f85-4371-98eb-25b267c9aff7', 'attached_at': '', 'detached_at': '', 'volume_id': '87389bc2-cacc-4afc-ae89-1315868453a6', 'serial': '87389bc2-cacc-4afc-ae89-1315868453a6'} {{(pid=63345) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1023.371441] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d627f75-1f66-4e04-9626-d433cbe4bb4d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.390775] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d11d5648-750d-4007-8d80-8775d3282557 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.421545] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-799b6c6a-3cd8-4c98-9f18-15ee76e32175 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Reconfiguring VM instance instance-0000005e to attach disk [datastore1] volume-87389bc2-cacc-4afc-ae89-1315868453a6/volume-87389bc2-cacc-4afc-ae89-1315868453a6.vmdk or device None with type thin {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1023.422273] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4f30d87b-72c9-4057-8b6f-3c32ff6e30b8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.443699] env[63345]: DEBUG oslo_vmware.api [None req-799b6c6a-3cd8-4c98-9f18-15ee76e32175 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Waiting for the task: (returnval){ [ 1023.443699] env[63345]: value = "task-1017712" [ 1023.443699] env[63345]: _type = "Task" [ 1023.443699] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.453868] env[63345]: DEBUG oslo_vmware.api [None req-799b6c6a-3cd8-4c98-9f18-15ee76e32175 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': task-1017712, 'name': ReconfigVM_Task} progress is 5%. 
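The _attach_volume_vmdk record spells out the connection_info dict handed to the driver: driver_volume_type 'vmdk' plus a data section with the backing 'volume' reference, volume_id, name and access_mode. A tiny helper that pulls out the fields the attach path cares about, using the dict from the log (vmdk_attach_params is an illustrative name, not a Nova function):

    connection_info = {
        "driver_volume_type": "vmdk",
        "data": {"volume": "vm-226148",
                 "volume_id": "87389bc2-cacc-4afc-ae89-1315868453a6",
                 "name": "volume-87389bc2-cacc-4afc-ae89-1315868453a6",
                 "profile_id": None, "qos_specs": None,
                 "access_mode": "rw", "encrypted": False, "cacheable": False},
        "serial": "87389bc2-cacc-4afc-ae89-1315868453a6",
    }

    def vmdk_attach_params(info):
        if info["driver_volume_type"] != "vmdk":
            raise ValueError("not a vmdk attachment")
        data = info["data"]
        return {"backing_moref": data["volume"],
                "volume_id": data["volume_id"],
                "read_only": data["access_mode"] != "rw"}

    print(vmdk_attach_params(connection_info))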
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.628742] env[63345]: DEBUG oslo_concurrency.lockutils [None req-54db4300-37a3-4464-bcaf-1099a9ac4537 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1023.674056] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5e2f651-f64b-4e04-84cf-597834b3e44a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.681228] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-752d7506-9fb3-4b34-b122-44e29b84f3eb {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.712498] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-357b0d5d-0063-4b25-9d9b-12c083841bea {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.721413] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-448ba6da-d17c-4475-a519-d247c096db7e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.735115] env[63345]: DEBUG nova.compute.provider_tree [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1023.803105] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9dc7741-3952-43fb-bcf5-4e551890e903 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: a7d80763-92f0-45a9-b24b-1f973bffb376] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1023.803409] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0856a3f0-f8e4-40da-a220-64d172436c0a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.812024] env[63345]: DEBUG oslo_vmware.api [None req-f9dc7741-3952-43fb-bcf5-4e551890e903 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for the task: (returnval){ [ 1023.812024] env[63345]: value = "task-1017713" [ 1023.812024] env[63345]: _type = "Task" [ 1023.812024] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.821294] env[63345]: DEBUG oslo_vmware.api [None req-f9dc7741-3952-43fb-bcf5-4e551890e903 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017713, 'name': PowerOffVM_Task} progress is 0%. 
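Annotation: "Inventory has not changed in ProviderTree for provider ..." above means the resource tracker compared the freshly computed inventory against its cached copy and skipped the Placement update. A deliberately simplified illustration of that check follows; the real comparison is field-by-field per resource class, and the names and data here are only for demonstration.

    def inventory_changed(cached, proposed):
        """Return True only if the proposed inventory differs from the cached copy."""
        return cached != proposed


    cached = {
        "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0},
    }
    proposed = {rc: dict(fields) for rc, fields in cached.items()}  # identical copy

    if not inventory_changed(cached, proposed):
        print("Inventory has not changed; skipping the Placement update.")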
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.875889] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f2f37c9e-38cd-4fb3-8e8f-ddc7c55887a7 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquiring lock "dd624e54-bd5b-4660-88a1-9d6f36560421" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1023.875889] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f2f37c9e-38cd-4fb3-8e8f-ddc7c55887a7 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquired lock "dd624e54-bd5b-4660-88a1-9d6f36560421" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1023.876662] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-374a86bb-d14c-4579-ab7d-b7bc8aef3376 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.901125] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1c16afc-651a-4adf-8995-882777df6d03 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.933886] env[63345]: DEBUG nova.compute.manager [None req-a218ce07-ac78-4a72-9854-e8fb6b118af6 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Found 2 images (rotation: 2) {{(pid=63345) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4883}} [ 1023.942924] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-f2f37c9e-38cd-4fb3-8e8f-ddc7c55887a7 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Reconfiguring VM to detach interface {{(pid=63345) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1974}} [ 1023.943763] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-93656656-a880-43fa-97bc-1d65fd3c13d9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.975436] env[63345]: DEBUG oslo_vmware.api [None req-799b6c6a-3cd8-4c98-9f18-15ee76e32175 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': task-1017712, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.977561] env[63345]: DEBUG oslo_vmware.api [None req-f2f37c9e-38cd-4fb3-8e8f-ddc7c55887a7 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Waiting for the task: (returnval){ [ 1023.977561] env[63345]: value = "task-1017714" [ 1023.977561] env[63345]: _type = "Task" [ 1023.977561] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.992051] env[63345]: DEBUG oslo_vmware.api [None req-f2f37c9e-38cd-4fb3-8e8f-ddc7c55887a7 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017714, 'name': ReconfigVM_Task} progress is 6%. 
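Annotation: "Found 2 images (rotation: 2)" above comes from the backup-rotation step: after a backup-type snapshot, only the newest `rotation` images for the instance are kept and anything older is deleted. A minimal sketch of that policy, with made-up image records and a hypothetical helper name:

    from datetime import datetime, timedelta


    def rotate_backups(images, rotation):
        """Keep the newest `rotation` backups; return the ones that should be deleted."""
        ordered = sorted(images, key=lambda img: img["created_at"], reverse=True)
        return ordered[rotation:]


    now = datetime(2024, 9, 30, 12, 0, 0)
    backups = [
        {"id": f"backup-{i}", "created_at": now - timedelta(hours=i)} for i in range(4)
    ]

    to_delete = rotate_backups(backups, rotation=2)
    print("deleting:", [img["id"] for img in to_delete])  # the two oldest backups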
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.238261] env[63345]: DEBUG nova.scheduler.client.report [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1024.323627] env[63345]: DEBUG oslo_vmware.api [None req-f9dc7741-3952-43fb-bcf5-4e551890e903 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017713, 'name': PowerOffVM_Task, 'duration_secs': 0.202032} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.327018] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9dc7741-3952-43fb-bcf5-4e551890e903 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: a7d80763-92f0-45a9-b24b-1f973bffb376] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1024.327018] env[63345]: DEBUG nova.compute.manager [None req-f9dc7741-3952-43fb-bcf5-4e551890e903 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: a7d80763-92f0-45a9-b24b-1f973bffb376] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1024.327018] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f841a8a3-4779-45de-83b5-b070c61cdaa9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.401114] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a0b2c3cb-2322-4a75-926e-5ed64f87f03c tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquiring lock "34990fa5-4a89-4430-8ea7-9e73dd41f441" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1024.401301] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a0b2c3cb-2322-4a75-926e-5ed64f87f03c tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Lock "34990fa5-4a89-4430-8ea7-9e73dd41f441" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1024.401494] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a0b2c3cb-2322-4a75-926e-5ed64f87f03c tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquiring lock "34990fa5-4a89-4430-8ea7-9e73dd41f441-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1024.401642] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a0b2c3cb-2322-4a75-926e-5ed64f87f03c tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Lock "34990fa5-4a89-4430-8ea7-9e73dd41f441-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1024.401816] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a0b2c3cb-2322-4a75-926e-5ed64f87f03c tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Lock "34990fa5-4a89-4430-8ea7-9e73dd41f441-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1024.405012] env[63345]: INFO nova.compute.manager [None req-a0b2c3cb-2322-4a75-926e-5ed64f87f03c tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 34990fa5-4a89-4430-8ea7-9e73dd41f441] Terminating instance [ 1024.478352] env[63345]: DEBUG oslo_vmware.api [None req-799b6c6a-3cd8-4c98-9f18-15ee76e32175 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': task-1017712, 'name': ReconfigVM_Task, 'duration_secs': 0.691662} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.478759] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-799b6c6a-3cd8-4c98-9f18-15ee76e32175 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Reconfigured VM instance instance-0000005e to attach disk [datastore1] volume-87389bc2-cacc-4afc-ae89-1315868453a6/volume-87389bc2-cacc-4afc-ae89-1315868453a6.vmdk or device None with type thin {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1024.486448] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cfd21d79-65a9-4617-bc43-3df5d5d741e0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.505251] env[63345]: DEBUG oslo_vmware.api [None req-f2f37c9e-38cd-4fb3-8e8f-ddc7c55887a7 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017714, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.506750] env[63345]: DEBUG oslo_vmware.api [None req-799b6c6a-3cd8-4c98-9f18-15ee76e32175 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Waiting for the task: (returnval){ [ 1024.506750] env[63345]: value = "task-1017715" [ 1024.506750] env[63345]: _type = "Task" [ 1024.506750] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.515637] env[63345]: DEBUG oslo_vmware.api [None req-799b6c6a-3cd8-4c98-9f18-15ee76e32175 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': task-1017715, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.744748] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.235s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1024.745326] env[63345]: DEBUG nova.compute.manager [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Start building networks asynchronously for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 1024.748962] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.438s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1024.749524] env[63345]: INFO nova.compute.claims [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: 0f3f59b6-e7bc-4657-af5f-eec18efc3666] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1024.839764] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f9dc7741-3952-43fb-bcf5-4e551890e903 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lock "a7d80763-92f0-45a9-b24b-1f973bffb376" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.065s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1024.908801] env[63345]: DEBUG nova.compute.manager [None req-a0b2c3cb-2322-4a75-926e-5ed64f87f03c tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 34990fa5-4a89-4430-8ea7-9e73dd41f441] Start destroying the instance on the hypervisor. 
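Annotation: the "Acquiring lock ... / Lock ... acquired ... waited 0.000s / released ... held 2.235s" lines come from oslo.concurrency's lockutils wrapper, which serializes critical sections such as the "compute_resources" claim by lock name. A minimal usage sketch under that assumption; the worker function is hypothetical, and the snippet needs oslo.concurrency installed (the venv in the trace has it).

    from oslo_concurrency import lockutils


    @lockutils.synchronized('compute_resources')
    def claim_resources(instance_uuid):
        # Everything in here runs under the named "compute_resources" lock, so
        # concurrent claims are serialized, much like the resource tracker's
        # instance_claim calls shown in the log.
        print(f"claiming resources for {instance_uuid}")


    claim_resources('0f3f59b6-e7bc-4657-af5f-eec18efc3666')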
{{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 1024.909054] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a0b2c3cb-2322-4a75-926e-5ed64f87f03c tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 34990fa5-4a89-4430-8ea7-9e73dd41f441] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1024.909940] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a75c242-afe4-4cee-a9b3-017dfa6748e5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.919651] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a0b2c3cb-2322-4a75-926e-5ed64f87f03c tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 34990fa5-4a89-4430-8ea7-9e73dd41f441] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1024.919901] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1a83d12e-9bc2-4a74-aea3-b4b63809aa47 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.953975] env[63345]: DEBUG nova.compute.manager [None req-73457db2-b2f1-4d9d-b2c1-6ca515137362 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1024.954903] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2450b6c2-d632-412b-8d0d-ae81409b1c89 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.991142] env[63345]: DEBUG oslo_vmware.api [None req-f2f37c9e-38cd-4fb3-8e8f-ddc7c55887a7 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017714, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.997796] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a0b2c3cb-2322-4a75-926e-5ed64f87f03c tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 34990fa5-4a89-4430-8ea7-9e73dd41f441] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1024.998164] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a0b2c3cb-2322-4a75-926e-5ed64f87f03c tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 34990fa5-4a89-4430-8ea7-9e73dd41f441] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1024.998465] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0b2c3cb-2322-4a75-926e-5ed64f87f03c tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Deleting the datastore file [datastore2] 34990fa5-4a89-4430-8ea7-9e73dd41f441 {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1024.998761] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ad3eea43-b286-4e93-9963-bb1b71f29101 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.007409] env[63345]: DEBUG oslo_vmware.api [None req-a0b2c3cb-2322-4a75-926e-5ed64f87f03c tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Waiting for the task: (returnval){ [ 1025.007409] env[63345]: value = "task-1017718" [ 1025.007409] env[63345]: _type = "Task" [ 1025.007409] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.022099] env[63345]: DEBUG oslo_vmware.api [None req-a0b2c3cb-2322-4a75-926e-5ed64f87f03c tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017718, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.022422] env[63345]: DEBUG oslo_vmware.api [None req-799b6c6a-3cd8-4c98-9f18-15ee76e32175 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': task-1017715, 'name': ReconfigVM_Task, 'duration_secs': 0.186229} completed successfully. 
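Annotation: the teardown above follows a fixed order: unregister the VM from vCenter, delete the instance directory from the datastore, then report the instance destroyed. The sketch below only mirrors that order with stubbed calls; the helper names, managed-object id and datastore values are placeholders, not the driver's real code.

    def unregister_vm(vm_ref):
        # Stub for VirtualMachine.UnregisterVM.
        print(f"Unregistered the VM {vm_ref}")


    def delete_datastore_dir(datastore, instance_uuid):
        # Stub for FileManager.DeleteDatastoreFile_Task followed by wait_for_task.
        print(f"Deleted the datastore file [{datastore}] {instance_uuid}")


    def destroy_instance(vm_ref, datastore, instance_uuid):
        """Mirror the order of operations visible in the trace: unregister, then wipe files."""
        unregister_vm(vm_ref)
        delete_datastore_dir(datastore, instance_uuid)
        print("Instance destroyed")


    destroy_instance("vm-000000", "datastore2", "34990fa5-4a89-4430-8ea7-9e73dd41f441")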
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.022727] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-799b6c6a-3cd8-4c98-9f18-15ee76e32175 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-226148', 'volume_id': '87389bc2-cacc-4afc-ae89-1315868453a6', 'name': 'volume-87389bc2-cacc-4afc-ae89-1315868453a6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '22a11cf9-8f85-4371-98eb-25b267c9aff7', 'attached_at': '', 'detached_at': '', 'volume_id': '87389bc2-cacc-4afc-ae89-1315868453a6', 'serial': '87389bc2-cacc-4afc-ae89-1315868453a6'} {{(pid=63345) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1025.081264] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Acquiring lock "95ef4f91-a618-4ae2-95ad-d027c031f239" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1025.081550] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Lock "95ef4f91-a618-4ae2-95ad-d027c031f239" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1025.254209] env[63345]: DEBUG nova.compute.utils [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1025.258778] env[63345]: DEBUG nova.compute.manager [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Allocating IP information in the background. 
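Annotation: the "Attached VMDK: {...}" dictionary above is the connection_info handed to the driver for the vmdk volume; the volume name in its `data` section is what produced the "[datastore1] volume-.../volume-....vmdk" backing path used during the earlier reconfigure. A small helper showing that derivation; the function is hypothetical, and only the dict layout is taken from the trace.

    def vmdk_backing_path(connection_info, datastore):
        """Build the '[datastore] name/name.vmdk' path for a vmdk-type volume."""
        if connection_info["driver_volume_type"] != "vmdk":
            raise ValueError("not a vmdk volume")
        name = connection_info["data"]["name"]
        return f"[{datastore}] {name}/{name}.vmdk"


    connection_info = {
        "driver_volume_type": "vmdk",
        "data": {
            "volume": "vm-226148",
            "volume_id": "87389bc2-cacc-4afc-ae89-1315868453a6",
            "name": "volume-87389bc2-cacc-4afc-ae89-1315868453a6",
            "access_mode": "rw",
        },
    }

    print(vmdk_backing_path(connection_info, "datastore1"))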
{{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1025.258969] env[63345]: DEBUG nova.network.neutron [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1025.300906] env[63345]: DEBUG nova.policy [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '15bc99129abe498abb7549a6578a68d3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ba08f64c26d245a8b8f2b52ea97c2f1a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 1025.309352] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f6699b30-427e-475c-bf5f-171539ce6309 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquiring lock "a7d80763-92f0-45a9-b24b-1f973bffb376" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1025.309607] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f6699b30-427e-475c-bf5f-171539ce6309 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lock "a7d80763-92f0-45a9-b24b-1f973bffb376" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1025.309827] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f6699b30-427e-475c-bf5f-171539ce6309 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquiring lock "a7d80763-92f0-45a9-b24b-1f973bffb376-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1025.310020] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f6699b30-427e-475c-bf5f-171539ce6309 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lock "a7d80763-92f0-45a9-b24b-1f973bffb376-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1025.310205] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f6699b30-427e-475c-bf5f-171539ce6309 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lock "a7d80763-92f0-45a9-b24b-1f973bffb376-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1025.312219] env[63345]: INFO nova.compute.manager 
[None req-f6699b30-427e-475c-bf5f-171539ce6309 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: a7d80763-92f0-45a9-b24b-1f973bffb376] Terminating instance [ 1025.466360] env[63345]: INFO nova.compute.manager [None req-73457db2-b2f1-4d9d-b2c1-6ca515137362 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] instance snapshotting [ 1025.466766] env[63345]: DEBUG nova.objects.instance [None req-73457db2-b2f1-4d9d-b2c1-6ca515137362 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lazy-loading 'flavor' on Instance uuid 726332dd-8699-49a4-a9ea-b9cbfc159855 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1025.490529] env[63345]: DEBUG oslo_vmware.api [None req-f2f37c9e-38cd-4fb3-8e8f-ddc7c55887a7 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017714, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.520802] env[63345]: DEBUG oslo_vmware.api [None req-a0b2c3cb-2322-4a75-926e-5ed64f87f03c tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Task: {'id': task-1017718, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.135433} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.521076] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0b2c3cb-2322-4a75-926e-5ed64f87f03c tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1025.521278] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a0b2c3cb-2322-4a75-926e-5ed64f87f03c tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 34990fa5-4a89-4430-8ea7-9e73dd41f441] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1025.521467] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a0b2c3cb-2322-4a75-926e-5ed64f87f03c tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 34990fa5-4a89-4430-8ea7-9e73dd41f441] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1025.521645] env[63345]: INFO nova.compute.manager [None req-a0b2c3cb-2322-4a75-926e-5ed64f87f03c tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [instance: 34990fa5-4a89-4430-8ea7-9e73dd41f441] Took 0.61 seconds to destroy the instance on the hypervisor. [ 1025.521951] env[63345]: DEBUG oslo.service.loopingcall [None req-a0b2c3cb-2322-4a75-926e-5ed64f87f03c tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
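Annotation: "Waiting for function ..._deallocate_network_with_retries to return" shows network teardown wrapped in a retrying looping call. The real code uses oslo.service's looping-call machinery; below is a plain-Python stand-in for the same retry-until-success idea, with a fake deallocation that fails once. All names are illustrative.

    import time


    def deallocate_with_retries(deallocate, attempts=3, delay=0.01):
        """Call deallocate() until it succeeds or the attempts are exhausted."""
        for attempt in range(1, attempts + 1):
            try:
                deallocate()
                return
            except Exception as exc:  # the real code narrows this to specific errors
                print(f"attempt {attempt} failed: {exc}")
                if attempt == attempts:
                    raise
                time.sleep(delay)


    calls = {"n": 0}


    def flaky_deallocate():
        calls["n"] += 1
        if calls["n"] == 1:
            raise RuntimeError("neutron briefly unavailable")
        print("network deallocated")


    deallocate_with_retries(flaky_deallocate)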
{{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1025.522173] env[63345]: DEBUG nova.compute.manager [-] [instance: 34990fa5-4a89-4430-8ea7-9e73dd41f441] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 1025.522290] env[63345]: DEBUG nova.network.neutron [-] [instance: 34990fa5-4a89-4430-8ea7-9e73dd41f441] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1025.584257] env[63345]: DEBUG nova.compute.manager [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 95ef4f91-a618-4ae2-95ad-d027c031f239] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 1025.663117] env[63345]: DEBUG nova.network.neutron [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Successfully created port: f9b10cca-c2c3-45d2-a329-61efee5dde7f {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1025.765730] env[63345]: DEBUG nova.compute.manager [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Start building block device mappings for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 1025.807384] env[63345]: DEBUG nova.compute.manager [req-e584d18a-4f15-437c-bc92-68559530e8d1 req-ea17e39b-18df-4c35-bb03-8baf73d783d4 service nova] [instance: 34990fa5-4a89-4430-8ea7-9e73dd41f441] Received event network-vif-deleted-0b7f6d00-228a-4a62-a372-c1c21d8d6aa3 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1025.807479] env[63345]: INFO nova.compute.manager [req-e584d18a-4f15-437c-bc92-68559530e8d1 req-ea17e39b-18df-4c35-bb03-8baf73d783d4 service nova] [instance: 34990fa5-4a89-4430-8ea7-9e73dd41f441] Neutron deleted interface 0b7f6d00-228a-4a62-a372-c1c21d8d6aa3; detaching it from the instance and deleting it from the info cache [ 1025.807661] env[63345]: DEBUG nova.network.neutron [req-e584d18a-4f15-437c-bc92-68559530e8d1 req-ea17e39b-18df-4c35-bb03-8baf73d783d4 service nova] [instance: 34990fa5-4a89-4430-8ea7-9e73dd41f441] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1025.816218] env[63345]: DEBUG nova.compute.manager [None req-f6699b30-427e-475c-bf5f-171539ce6309 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: a7d80763-92f0-45a9-b24b-1f973bffb376] Start destroying the instance on the hypervisor. 
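Annotation: the "Received event network-vif-deleted-..." / "detaching it from the instance and deleting it from the info cache" / "Updating instance_info_cache with network_info: []" sequence above shows a Neutron port-deletion event pruning the cached VIF list. A toy version of that pruning step; the data and function name are invented for the example.

    def drop_deleted_vif(network_info, deleted_port_id):
        """Return the cached network_info without the VIF whose port was deleted."""
        return [vif for vif in network_info if vif["id"] != deleted_port_id]


    cached = [{"id": "0b7f6d00-228a-4a62-a372-c1c21d8d6aa3", "address": "fa:16:3e:00:00:01"}]
    print(drop_deleted_vif(cached, "0b7f6d00-228a-4a62-a372-c1c21d8d6aa3"))  # -> []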
{{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 1025.818042] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-f6699b30-427e-475c-bf5f-171539ce6309 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: a7d80763-92f0-45a9-b24b-1f973bffb376] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1025.818042] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8828a09f-62ad-4d30-bd0f-1e8f4d11e283 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.828584] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-f6699b30-427e-475c-bf5f-171539ce6309 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: a7d80763-92f0-45a9-b24b-1f973bffb376] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1025.831113] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a0435358-e7de-4e37-9a53-cd3ff74711d4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.911170] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-f6699b30-427e-475c-bf5f-171539ce6309 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: a7d80763-92f0-45a9-b24b-1f973bffb376] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1025.911425] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-f6699b30-427e-475c-bf5f-171539ce6309 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: a7d80763-92f0-45a9-b24b-1f973bffb376] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1025.912215] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6699b30-427e-475c-bf5f-171539ce6309 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Deleting the datastore file [datastore2] a7d80763-92f0-45a9-b24b-1f973bffb376 {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1025.914454] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-191839e1-e381-4cb6-b0ee-cc8d10e811c8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.921936] env[63345]: DEBUG oslo_vmware.api [None req-f6699b30-427e-475c-bf5f-171539ce6309 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for the task: (returnval){ [ 1025.921936] env[63345]: value = "task-1017720" [ 1025.921936] env[63345]: _type = "Task" [ 1025.921936] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.932683] env[63345]: DEBUG oslo_vmware.api [None req-f6699b30-427e-475c-bf5f-171539ce6309 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017720, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.965866] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d578a458-9d4f-4eb6-9b4f-030d7840ae7a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.976261] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02663879-4083-41f0-8171-bd32b7b7d4e3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.979800] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c057d903-074e-406a-ba06-1bbe339ffbe3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.025356] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be01a476-f9a7-432d-b1c5-82ffec4f3572 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.028029] env[63345]: DEBUG oslo_vmware.api [None req-f2f37c9e-38cd-4fb3-8e8f-ddc7c55887a7 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017714, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.028971] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78c3ed9e-efbc-43e1-bddc-b3c5d2aa38ce {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.038199] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ecaf350-3be2-45bb-8916-fd5d43bddaca {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.056356] env[63345]: DEBUG nova.compute.provider_tree [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1026.065394] env[63345]: DEBUG nova.objects.instance [None req-799b6c6a-3cd8-4c98-9f18-15ee76e32175 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Lazy-loading 'flavor' on Instance uuid 22a11cf9-8f85-4371-98eb-25b267c9aff7 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1026.104171] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1026.281349] env[63345]: DEBUG nova.network.neutron [-] [instance: 34990fa5-4a89-4430-8ea7-9e73dd41f441] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1026.315093] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e1c289ac-c1ef-4874-b467-28b51a991710 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.324446] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab8f6933-b48a-4b56-ac7d-591287ca066e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.354254] env[63345]: DEBUG nova.compute.manager [req-e584d18a-4f15-437c-bc92-68559530e8d1 req-ea17e39b-18df-4c35-bb03-8baf73d783d4 service nova] [instance: 34990fa5-4a89-4430-8ea7-9e73dd41f441] Detach interface failed, port_id=0b7f6d00-228a-4a62-a372-c1c21d8d6aa3, reason: Instance 34990fa5-4a89-4430-8ea7-9e73dd41f441 could not be found. {{(pid=63345) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 1026.432789] env[63345]: DEBUG oslo_vmware.api [None req-f6699b30-427e-475c-bf5f-171539ce6309 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017720, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.129782} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.433016] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6699b30-427e-475c-bf5f-171539ce6309 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1026.433215] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-f6699b30-427e-475c-bf5f-171539ce6309 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: a7d80763-92f0-45a9-b24b-1f973bffb376] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1026.433428] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-f6699b30-427e-475c-bf5f-171539ce6309 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: a7d80763-92f0-45a9-b24b-1f973bffb376] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1026.433605] env[63345]: INFO nova.compute.manager [None req-f6699b30-427e-475c-bf5f-171539ce6309 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: a7d80763-92f0-45a9-b24b-1f973bffb376] Took 0.62 seconds to destroy the instance on the hypervisor. [ 1026.433852] env[63345]: DEBUG oslo.service.loopingcall [None req-f6699b30-427e-475c-bf5f-171539ce6309 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1026.434063] env[63345]: DEBUG nova.compute.manager [-] [instance: a7d80763-92f0-45a9-b24b-1f973bffb376] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 1026.434160] env[63345]: DEBUG nova.network.neutron [-] [instance: a7d80763-92f0-45a9-b24b-1f973bffb376] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1026.492478] env[63345]: DEBUG oslo_vmware.api [None req-f2f37c9e-38cd-4fb3-8e8f-ddc7c55887a7 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017714, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.548269] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-73457db2-b2f1-4d9d-b2c1-6ca515137362 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Creating Snapshot of the VM instance {{(pid=63345) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1026.548623] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-b04d88b3-2ab9-471f-8d66-20df3e3ed326 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.558331] env[63345]: DEBUG oslo_vmware.api [None req-73457db2-b2f1-4d9d-b2c1-6ca515137362 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 1026.558331] env[63345]: value = "task-1017721" [ 1026.558331] env[63345]: _type = "Task" [ 1026.558331] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.572993] env[63345]: DEBUG oslo_vmware.api [None req-73457db2-b2f1-4d9d-b2c1-6ca515137362 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017721, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.572993] env[63345]: DEBUG oslo_concurrency.lockutils [None req-799b6c6a-3cd8-4c98-9f18-15ee76e32175 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Lock "22a11cf9-8f85-4371-98eb-25b267c9aff7" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.869s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1026.578032] env[63345]: ERROR nova.scheduler.client.report [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [req-5f4d7d2f-560b-425f-b494-1a971698b0c9] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID fc35ddde-c15e-4ab8-bf77-a06ae0805b57. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-5f4d7d2f-560b-425f-b494-1a971698b0c9"}]} [ 1026.597935] env[63345]: DEBUG nova.scheduler.client.report [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Refreshing inventories for resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1026.613607] env[63345]: DEBUG nova.scheduler.client.report [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Updating ProviderTree inventory for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1026.614191] env[63345]: DEBUG nova.compute.provider_tree [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1026.627711] env[63345]: DEBUG nova.scheduler.client.report [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Refreshing aggregate associations for resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57, aggregates: None {{(pid=63345) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1026.649028] env[63345]: DEBUG nova.scheduler.client.report [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Refreshing trait associations for resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=63345) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1026.778672] env[63345]: DEBUG nova.compute.manager [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Start spawning the instance on the hypervisor. 
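Annotation: the 409 "placement.concurrent_update" error above is Placement's optimistic-concurrency check: the inventory update carried a stale resource-provider generation, so the report client refreshes its view and retries, which is why a later line records "Updated inventory ... with generation 140". The following is a self-contained simulation of that generation handshake; the FakePlacement class and numbers are invented for illustration.

    class Conflict(Exception):
        """Stand-in for the HTTP 409 placement.concurrent_update response."""


    class FakePlacement:
        """Toy resource-provider store with a generation counter."""

        def __init__(self, generation):
            self.generation = generation
            self.inventory = {}

        def get_generation(self):
            return self.generation

        def set_inventory(self, inventory, generation):
            if generation != self.generation:
                raise Conflict("resource provider generation conflict")
            self.inventory = inventory
            self.generation += 1
            return self.generation


    def update_inventory_with_retry(placement, inventory, generation, retries=3):
        """On 409, refresh the provider generation and try again, as the report client does."""
        for _ in range(retries):
            try:
                return placement.set_inventory(inventory, generation)
            except Conflict:
                generation = placement.get_generation()  # refresh, then retry
        raise Conflict("gave up after repeated generation conflicts")


    placement = FakePlacement(generation=139)     # another writer already bumped it to 139
    new_generation = update_inventory_with_retry(
        placement, {"DISK_GB": {"max_unit": 187}}, generation=138)
    print("updated with generation", new_generation)  # -> 140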
{{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 1026.783040] env[63345]: INFO nova.compute.manager [-] [instance: 34990fa5-4a89-4430-8ea7-9e73dd41f441] Took 1.26 seconds to deallocate network for instance. [ 1026.814223] env[63345]: DEBUG nova.virt.hardware [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1026.814917] env[63345]: DEBUG nova.virt.hardware [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1026.814917] env[63345]: DEBUG nova.virt.hardware [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1026.815109] env[63345]: DEBUG nova.virt.hardware [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1026.815142] env[63345]: DEBUG nova.virt.hardware [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1026.816028] env[63345]: DEBUG nova.virt.hardware [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1026.816028] env[63345]: DEBUG nova.virt.hardware [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1026.816028] env[63345]: DEBUG nova.virt.hardware [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 
tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1026.816028] env[63345]: DEBUG nova.virt.hardware [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1026.816303] env[63345]: DEBUG nova.virt.hardware [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1026.816620] env[63345]: DEBUG nova.virt.hardware [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1026.817936] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8865b31a-5d22-45f7-9775-f92d58f70c59 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.825764] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa751646-adcc-4899-b3bf-359a889109f1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.831652] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a1a5d31-5e88-43fc-bc08-2c394cb87d7f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.839375] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce411838-1147-4358-b6b7-dfb755d9da88 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.880637] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d61c53a-6b00-4476-87c7-9f9334febf46 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.888413] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9d357f7-778f-4b0b-8fab-a0352c91e30d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.901933] env[63345]: DEBUG nova.compute.provider_tree [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1026.992163] env[63345]: DEBUG oslo_vmware.api [None req-f2f37c9e-38cd-4fb3-8e8f-ddc7c55887a7 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017714, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.074356] env[63345]: DEBUG oslo_vmware.api [None req-73457db2-b2f1-4d9d-b2c1-6ca515137362 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017721, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.105571] env[63345]: DEBUG nova.compute.manager [req-b85c596d-6600-4aec-8aac-4235025422f4 req-3049c4e8-37b0-41b7-880e-e5a16507c647 service nova] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Received event network-vif-plugged-f9b10cca-c2c3-45d2-a329-61efee5dde7f {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1027.105815] env[63345]: DEBUG oslo_concurrency.lockutils [req-b85c596d-6600-4aec-8aac-4235025422f4 req-3049c4e8-37b0-41b7-880e-e5a16507c647 service nova] Acquiring lock "95738bee-d291-4f27-aeff-9445939bb3fa-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1027.106044] env[63345]: DEBUG oslo_concurrency.lockutils [req-b85c596d-6600-4aec-8aac-4235025422f4 req-3049c4e8-37b0-41b7-880e-e5a16507c647 service nova] Lock "95738bee-d291-4f27-aeff-9445939bb3fa-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1027.106219] env[63345]: DEBUG oslo_concurrency.lockutils [req-b85c596d-6600-4aec-8aac-4235025422f4 req-3049c4e8-37b0-41b7-880e-e5a16507c647 service nova] Lock "95738bee-d291-4f27-aeff-9445939bb3fa-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1027.106995] env[63345]: DEBUG nova.compute.manager [req-b85c596d-6600-4aec-8aac-4235025422f4 req-3049c4e8-37b0-41b7-880e-e5a16507c647 service nova] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] No waiting events found dispatching network-vif-plugged-f9b10cca-c2c3-45d2-a329-61efee5dde7f {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1027.107250] env[63345]: WARNING nova.compute.manager [req-b85c596d-6600-4aec-8aac-4235025422f4 req-3049c4e8-37b0-41b7-880e-e5a16507c647 service nova] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Received unexpected event network-vif-plugged-f9b10cca-c2c3-45d2-a329-61efee5dde7f for instance with vm_state building and task_state spawning. 
[ 1027.126414] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f835b62c-0fa7-410f-824a-5b3d546d0e8e tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Acquiring lock "22a11cf9-8f85-4371-98eb-25b267c9aff7" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1027.126678] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f835b62c-0fa7-410f-824a-5b3d546d0e8e tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Lock "22a11cf9-8f85-4371-98eb-25b267c9aff7" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1027.126862] env[63345]: DEBUG nova.compute.manager [None req-f835b62c-0fa7-410f-824a-5b3d546d0e8e tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1027.127756] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0eb1da6-9996-4b0e-a9b3-88c8899b4071 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.136262] env[63345]: DEBUG nova.compute.manager [None req-f835b62c-0fa7-410f-824a-5b3d546d0e8e tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63345) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3403}} [ 1027.136986] env[63345]: DEBUG nova.objects.instance [None req-f835b62c-0fa7-410f-824a-5b3d546d0e8e tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Lazy-loading 'flavor' on Instance uuid 22a11cf9-8f85-4371-98eb-25b267c9aff7 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1027.192392] env[63345]: DEBUG nova.network.neutron [-] [instance: a7d80763-92f0-45a9-b24b-1f973bffb376] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1027.289728] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a0b2c3cb-2322-4a75-926e-5ed64f87f03c tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1027.435572] env[63345]: DEBUG nova.scheduler.client.report [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Updated inventory for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with generation 140 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 
'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1027.435901] env[63345]: DEBUG nova.compute.provider_tree [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Updating resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 generation from 140 to 141 during operation: update_inventory {{(pid=63345) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1027.436144] env[63345]: DEBUG nova.compute.provider_tree [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1027.492847] env[63345]: DEBUG oslo_vmware.api [None req-f2f37c9e-38cd-4fb3-8e8f-ddc7c55887a7 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017714, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.572022] env[63345]: DEBUG oslo_vmware.api [None req-73457db2-b2f1-4d9d-b2c1-6ca515137362 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017721, 'name': CreateSnapshot_Task, 'duration_secs': 0.537118} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.572333] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-73457db2-b2f1-4d9d-b2c1-6ca515137362 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Created Snapshot of the VM instance {{(pid=63345) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1027.573090] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1e563ab-bc2f-496b-b7fc-2fa16ccfc084 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.669741] env[63345]: DEBUG nova.network.neutron [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Successfully updated port: f9b10cca-c2c3-45d2-a329-61efee5dde7f {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1027.695144] env[63345]: DEBUG nova.compute.manager [req-5a7a1bd1-5c55-41ae-af43-35cd451093f8 req-d715dbca-48d0-408c-883c-2cb16c8b1759 service nova] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Received event network-changed-f9b10cca-c2c3-45d2-a329-61efee5dde7f {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1027.695363] env[63345]: DEBUG nova.compute.manager [req-5a7a1bd1-5c55-41ae-af43-35cd451093f8 req-d715dbca-48d0-408c-883c-2cb16c8b1759 service nova] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Refreshing instance network info cache due to event network-changed-f9b10cca-c2c3-45d2-a329-61efee5dde7f. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 1027.695594] env[63345]: DEBUG oslo_concurrency.lockutils [req-5a7a1bd1-5c55-41ae-af43-35cd451093f8 req-d715dbca-48d0-408c-883c-2cb16c8b1759 service nova] Acquiring lock "refresh_cache-95738bee-d291-4f27-aeff-9445939bb3fa" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1027.695747] env[63345]: DEBUG oslo_concurrency.lockutils [req-5a7a1bd1-5c55-41ae-af43-35cd451093f8 req-d715dbca-48d0-408c-883c-2cb16c8b1759 service nova] Acquired lock "refresh_cache-95738bee-d291-4f27-aeff-9445939bb3fa" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1027.695913] env[63345]: DEBUG nova.network.neutron [req-5a7a1bd1-5c55-41ae-af43-35cd451093f8 req-d715dbca-48d0-408c-883c-2cb16c8b1759 service nova] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Refreshing network info cache for port f9b10cca-c2c3-45d2-a329-61efee5dde7f {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1027.697298] env[63345]: INFO nova.compute.manager [-] [instance: a7d80763-92f0-45a9-b24b-1f973bffb376] Took 1.26 seconds to deallocate network for instance. 
[ 1027.832567] env[63345]: DEBUG nova.compute.manager [req-dbb94027-6074-4327-b60d-355abef69a17 req-f7b52848-cfc0-432a-8b07-ef55fcac2cfb service nova] [instance: a7d80763-92f0-45a9-b24b-1f973bffb376] Received event network-vif-deleted-c5799dfa-7c87-4bbf-b2c7-28aef5b31d52 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1027.941562] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.193s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1027.942137] env[63345]: DEBUG nova.compute.manager [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: 0f3f59b6-e7bc-4657-af5f-eec18efc3666] Start building networks asynchronously for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 1027.945415] env[63345]: DEBUG oslo_concurrency.lockutils [None req-54db4300-37a3-4464-bcaf-1099a9ac4537 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.317s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1027.946882] env[63345]: INFO nova.compute.claims [None req-54db4300-37a3-4464-bcaf-1099a9ac4537 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: da3408a0-cce7-4252-be47-097f081d83c1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1027.994009] env[63345]: DEBUG oslo_vmware.api [None req-f2f37c9e-38cd-4fb3-8e8f-ddc7c55887a7 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017714, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.089999] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-73457db2-b2f1-4d9d-b2c1-6ca515137362 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Creating linked-clone VM from snapshot {{(pid=63345) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1028.090733] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-7c685517-4c4b-445c-af26-c7bf4dedb2b2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.101668] env[63345]: DEBUG oslo_vmware.api [None req-73457db2-b2f1-4d9d-b2c1-6ca515137362 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 1028.101668] env[63345]: value = "task-1017723" [ 1028.101668] env[63345]: _type = "Task" [ 1028.101668] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.109209] env[63345]: DEBUG oslo_vmware.api [None req-73457db2-b2f1-4d9d-b2c1-6ca515137362 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017723, 'name': CloneVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.143418] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-f835b62c-0fa7-410f-824a-5b3d546d0e8e tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1028.143715] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6e36febb-8874-4ae7-8be6-fe3d0ad6c9e8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.152644] env[63345]: DEBUG oslo_vmware.api [None req-f835b62c-0fa7-410f-824a-5b3d546d0e8e tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Waiting for the task: (returnval){ [ 1028.152644] env[63345]: value = "task-1017724" [ 1028.152644] env[63345]: _type = "Task" [ 1028.152644] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.163579] env[63345]: DEBUG oslo_vmware.api [None req-f835b62c-0fa7-410f-824a-5b3d546d0e8e tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': task-1017724, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.176324] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Acquiring lock "refresh_cache-95738bee-d291-4f27-aeff-9445939bb3fa" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1028.204143] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f6699b30-427e-475c-bf5f-171539ce6309 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1028.233637] env[63345]: DEBUG nova.network.neutron [req-5a7a1bd1-5c55-41ae-af43-35cd451093f8 req-d715dbca-48d0-408c-883c-2cb16c8b1759 service nova] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1028.310363] env[63345]: DEBUG nova.network.neutron [req-5a7a1bd1-5c55-41ae-af43-35cd451093f8 req-d715dbca-48d0-408c-883c-2cb16c8b1759 service nova] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1028.447444] env[63345]: DEBUG nova.compute.utils [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1028.448920] env[63345]: DEBUG nova.compute.manager [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: 0f3f59b6-e7bc-4657-af5f-eec18efc3666] Not allocating networking since 'none' was specified. {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1983}} [ 1028.495426] env[63345]: DEBUG oslo_vmware.api [None req-f2f37c9e-38cd-4fb3-8e8f-ddc7c55887a7 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017714, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.611804] env[63345]: DEBUG oslo_vmware.api [None req-73457db2-b2f1-4d9d-b2c1-6ca515137362 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017723, 'name': CloneVM_Task} progress is 94%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.661969] env[63345]: DEBUG oslo_vmware.api [None req-f835b62c-0fa7-410f-824a-5b3d546d0e8e tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': task-1017724, 'name': PowerOffVM_Task, 'duration_secs': 0.189521} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.662389] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-f835b62c-0fa7-410f-824a-5b3d546d0e8e tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1028.662633] env[63345]: DEBUG nova.compute.manager [None req-f835b62c-0fa7-410f-824a-5b3d546d0e8e tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1028.663493] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ec45bf9-7d9a-4eab-835c-a524e3852ede {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.813486] env[63345]: DEBUG oslo_concurrency.lockutils [req-5a7a1bd1-5c55-41ae-af43-35cd451093f8 req-d715dbca-48d0-408c-883c-2cb16c8b1759 service nova] Releasing lock "refresh_cache-95738bee-d291-4f27-aeff-9445939bb3fa" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1028.813894] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Acquired lock "refresh_cache-95738bee-d291-4f27-aeff-9445939bb3fa" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1028.814073] env[63345]: DEBUG nova.network.neutron [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1028.950467] env[63345]: DEBUG nova.compute.manager [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: 0f3f59b6-e7bc-4657-af5f-eec18efc3666] Start building block device mappings for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 1028.994814] env[63345]: DEBUG oslo_vmware.api [None req-f2f37c9e-38cd-4fb3-8e8f-ddc7c55887a7 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017714, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.114976] env[63345]: DEBUG oslo_vmware.api [None req-73457db2-b2f1-4d9d-b2c1-6ca515137362 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017723, 'name': CloneVM_Task} progress is 95%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.132490] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba2a3812-7bd1-4884-97f0-3a4dd0605f3f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.140478] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23fef5c0-011f-4688-809b-1ddaed3493f8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.176376] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f835b62c-0fa7-410f-824a-5b3d546d0e8e tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Lock "22a11cf9-8f85-4371-98eb-25b267c9aff7" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.049s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1029.177974] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3c6a5f3-d1b1-48c8-8f00-6e4d68c2df8e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.187899] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2f3d4ec-186f-4f34-a2f4-e39b610adb5d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.205022] env[63345]: DEBUG nova.compute.provider_tree [None req-54db4300-37a3-4464-bcaf-1099a9ac4537 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1029.356995] env[63345]: DEBUG nova.network.neutron [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1029.495216] env[63345]: DEBUG oslo_vmware.api [None req-f2f37c9e-38cd-4fb3-8e8f-ddc7c55887a7 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017714, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.549295] env[63345]: DEBUG nova.objects.instance [None req-2aea3361-087c-44d3-8177-a435de003f47 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Lazy-loading 'flavor' on Instance uuid 22a11cf9-8f85-4371-98eb-25b267c9aff7 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1029.578294] env[63345]: DEBUG nova.network.neutron [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Updating instance_info_cache with network_info: [{"id": "f9b10cca-c2c3-45d2-a329-61efee5dde7f", "address": "fa:16:3e:31:f2:fb", "network": {"id": "95d95c9b-b21c-4ee5-ab54-d0bf2699d38e", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-88421441-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba08f64c26d245a8b8f2b52ea97c2f1a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7043ca7a-807c-4c7b-b646-23ffece188b2", "external-id": "nsx-vlan-transportzone-619", "segmentation_id": 619, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf9b10cca-c2", "ovs_interfaceid": "f9b10cca-c2c3-45d2-a329-61efee5dde7f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1029.615342] env[63345]: DEBUG oslo_vmware.api [None req-73457db2-b2f1-4d9d-b2c1-6ca515137362 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017723, 'name': CloneVM_Task, 'duration_secs': 1.234481} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.615626] env[63345]: INFO nova.virt.vmwareapi.vmops [None req-73457db2-b2f1-4d9d-b2c1-6ca515137362 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Created linked-clone VM from snapshot [ 1029.616462] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c697f9ea-2d92-4b66-81cc-dd3e62e5e434 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.623556] env[63345]: DEBUG nova.virt.vmwareapi.images [None req-73457db2-b2f1-4d9d-b2c1-6ca515137362 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Uploading image e440825b-4356-4ee4-8b0a-5a0a6c082c83 {{(pid=63345) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 1029.648987] env[63345]: DEBUG oslo_vmware.rw_handles [None req-73457db2-b2f1-4d9d-b2c1-6ca515137362 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1029.648987] env[63345]: value = "vm-226150" [ 1029.648987] env[63345]: _type = "VirtualMachine" [ 1029.648987] env[63345]: }. {{(pid=63345) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1029.649276] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-0a71dffb-5ff2-4343-940b-3d8be748c171 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.655728] env[63345]: DEBUG oslo_vmware.rw_handles [None req-73457db2-b2f1-4d9d-b2c1-6ca515137362 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lease: (returnval){ [ 1029.655728] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52693f10-9597-48bd-14aa-0d7738f9e54b" [ 1029.655728] env[63345]: _type = "HttpNfcLease" [ 1029.655728] env[63345]: } obtained for exporting VM: (result){ [ 1029.655728] env[63345]: value = "vm-226150" [ 1029.655728] env[63345]: _type = "VirtualMachine" [ 1029.655728] env[63345]: }. {{(pid=63345) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1029.656022] env[63345]: DEBUG oslo_vmware.api [None req-73457db2-b2f1-4d9d-b2c1-6ca515137362 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the lease: (returnval){ [ 1029.656022] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52693f10-9597-48bd-14aa-0d7738f9e54b" [ 1029.656022] env[63345]: _type = "HttpNfcLease" [ 1029.656022] env[63345]: } to be ready. {{(pid=63345) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1029.663241] env[63345]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1029.663241] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52693f10-9597-48bd-14aa-0d7738f9e54b" [ 1029.663241] env[63345]: _type = "HttpNfcLease" [ 1029.663241] env[63345]: } is initializing. 
{{(pid=63345) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1029.708855] env[63345]: DEBUG nova.scheduler.client.report [None req-54db4300-37a3-4464-bcaf-1099a9ac4537 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1029.964054] env[63345]: DEBUG nova.compute.manager [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: 0f3f59b6-e7bc-4657-af5f-eec18efc3666] Start spawning the instance on the hypervisor. {{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 1029.985706] env[63345]: DEBUG nova.virt.hardware [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1029.986009] env[63345]: DEBUG nova.virt.hardware [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1029.986199] env[63345]: DEBUG nova.virt.hardware [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1029.986393] env[63345]: DEBUG nova.virt.hardware [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1029.986552] env[63345]: DEBUG nova.virt.hardware [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1029.986708] env[63345]: DEBUG nova.virt.hardware [None 
req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1029.986919] env[63345]: DEBUG nova.virt.hardware [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1029.987098] env[63345]: DEBUG nova.virt.hardware [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1029.987277] env[63345]: DEBUG nova.virt.hardware [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1029.987466] env[63345]: DEBUG nova.virt.hardware [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1029.987647] env[63345]: DEBUG nova.virt.hardware [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1029.988564] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53f8560f-dedf-46a8-9ea8-96f9dd5adc00 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.999939] env[63345]: DEBUG oslo_vmware.api [None req-f2f37c9e-38cd-4fb3-8e8f-ddc7c55887a7 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017714, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.002870] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c71d1fa5-2ef8-446c-af4f-ab205c3f5e51 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.018710] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: 0f3f59b6-e7bc-4657-af5f-eec18efc3666] Instance VIF info [] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1030.024323] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Creating folder: Project (a2c76ef6776547ad8591f8db14e80b84). Parent ref: group-v225918. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1030.024543] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-200867af-15d1-483c-84e9-a7f672694224 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.034221] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Created folder: Project (a2c76ef6776547ad8591f8db14e80b84) in parent group-v225918. [ 1030.034396] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Creating folder: Instances. Parent ref: group-v226151. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1030.034610] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f995fce9-952a-4249-a7ef-c89807a9f60e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.042720] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Created folder: Instances in parent group-v226151. [ 1030.042948] env[63345]: DEBUG oslo.service.loopingcall [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1030.043154] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0f3f59b6-e7bc-4657-af5f-eec18efc3666] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1030.043353] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-55ae832b-b2b9-434b-8fa2-2475f26fc462 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.056702] env[63345]: DEBUG oslo_concurrency.lockutils [None req-2aea3361-087c-44d3-8177-a435de003f47 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Acquiring lock "refresh_cache-22a11cf9-8f85-4371-98eb-25b267c9aff7" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1030.056877] env[63345]: DEBUG oslo_concurrency.lockutils [None req-2aea3361-087c-44d3-8177-a435de003f47 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Acquired lock "refresh_cache-22a11cf9-8f85-4371-98eb-25b267c9aff7" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1030.057088] env[63345]: DEBUG nova.network.neutron [None req-2aea3361-087c-44d3-8177-a435de003f47 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1030.057274] env[63345]: DEBUG nova.objects.instance [None req-2aea3361-087c-44d3-8177-a435de003f47 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Lazy-loading 'info_cache' on Instance uuid 22a11cf9-8f85-4371-98eb-25b267c9aff7 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1030.060137] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1030.060137] env[63345]: value = "task-1017728" [ 1030.060137] env[63345]: _type = "Task" [ 1030.060137] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.066826] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017728, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.080915] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Releasing lock "refresh_cache-95738bee-d291-4f27-aeff-9445939bb3fa" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1030.081246] env[63345]: DEBUG nova.compute.manager [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Instance network_info: |[{"id": "f9b10cca-c2c3-45d2-a329-61efee5dde7f", "address": "fa:16:3e:31:f2:fb", "network": {"id": "95d95c9b-b21c-4ee5-ab54-d0bf2699d38e", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-88421441-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba08f64c26d245a8b8f2b52ea97c2f1a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7043ca7a-807c-4c7b-b646-23ffece188b2", "external-id": "nsx-vlan-transportzone-619", "segmentation_id": 619, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf9b10cca-c2", "ovs_interfaceid": "f9b10cca-c2c3-45d2-a329-61efee5dde7f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 1030.081627] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:31:f2:fb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7043ca7a-807c-4c7b-b646-23ffece188b2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f9b10cca-c2c3-45d2-a329-61efee5dde7f', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1030.088849] env[63345]: DEBUG oslo.service.loopingcall [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1030.089604] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1030.089824] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8c1f975f-e0ee-47f4-8b46-c3f77c411ca4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.107864] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1030.107864] env[63345]: value = "task-1017729" [ 1030.107864] env[63345]: _type = "Task" [ 1030.107864] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.114974] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017729, 'name': CreateVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.164938] env[63345]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1030.164938] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52693f10-9597-48bd-14aa-0d7738f9e54b" [ 1030.164938] env[63345]: _type = "HttpNfcLease" [ 1030.164938] env[63345]: } is ready. {{(pid=63345) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1030.165353] env[63345]: DEBUG oslo_vmware.rw_handles [None req-73457db2-b2f1-4d9d-b2c1-6ca515137362 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1030.165353] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52693f10-9597-48bd-14aa-0d7738f9e54b" [ 1030.165353] env[63345]: _type = "HttpNfcLease" [ 1030.165353] env[63345]: }. {{(pid=63345) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1030.166323] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30d011ec-c359-47b3-91da-b7c0ba8fab14 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.174478] env[63345]: DEBUG oslo_vmware.rw_handles [None req-73457db2-b2f1-4d9d-b2c1-6ca515137362 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528b77b1-acaa-f4f5-2294-ebcef12bf34c/disk-0.vmdk from lease info. {{(pid=63345) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1030.174688] env[63345]: DEBUG oslo_vmware.rw_handles [None req-73457db2-b2f1-4d9d-b2c1-6ca515137362 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528b77b1-acaa-f4f5-2294-ebcef12bf34c/disk-0.vmdk for reading. 
{{(pid=63345) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1030.237046] env[63345]: DEBUG oslo_concurrency.lockutils [None req-54db4300-37a3-4464-bcaf-1099a9ac4537 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.291s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1030.237645] env[63345]: DEBUG nova.compute.manager [None req-54db4300-37a3-4464-bcaf-1099a9ac4537 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: da3408a0-cce7-4252-be47-097f081d83c1] Start building networks asynchronously for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 1030.241650] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.138s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1030.243550] env[63345]: INFO nova.compute.claims [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 95ef4f91-a618-4ae2-95ad-d027c031f239] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1030.277686] env[63345]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-77c30e5e-bdeb-48c0-b433-a8e3946b7143 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.341810] env[63345]: DEBUG oslo_concurrency.lockutils [None req-73a5a717-9b1e-41c1-9c56-b077bccf504c tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquiring lock "bce78147-6f6d-47a2-84f3-482f59a8bb8e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1030.342204] env[63345]: DEBUG oslo_concurrency.lockutils [None req-73a5a717-9b1e-41c1-9c56-b077bccf504c tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lock "bce78147-6f6d-47a2-84f3-482f59a8bb8e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1030.497846] env[63345]: DEBUG oslo_vmware.api [None req-f2f37c9e-38cd-4fb3-8e8f-ddc7c55887a7 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017714, 'name': ReconfigVM_Task, 'duration_secs': 6.305462} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.498162] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f2f37c9e-38cd-4fb3-8e8f-ddc7c55887a7 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Releasing lock "dd624e54-bd5b-4660-88a1-9d6f36560421" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1030.498403] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-f2f37c9e-38cd-4fb3-8e8f-ddc7c55887a7 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Reconfigured VM to detach interface {{(pid=63345) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1984}} [ 1030.560157] env[63345]: DEBUG nova.objects.base [None req-2aea3361-087c-44d3-8177-a435de003f47 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Object Instance<22a11cf9-8f85-4371-98eb-25b267c9aff7> lazy-loaded attributes: flavor,info_cache {{(pid=63345) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 1030.573280] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017728, 'name': CreateVM_Task, 'duration_secs': 0.313209} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.574049] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0f3f59b6-e7bc-4657-af5f-eec18efc3666] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1030.574172] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1030.574353] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1030.574692] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1030.574955] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-644fb376-1324-4a4b-b885-82f8b44372a7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.579759] env[63345]: DEBUG oslo_vmware.api [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Waiting for the task: (returnval){ [ 1030.579759] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]5260dcbe-00a6-60f8-b6d3-8b23ab31d5ab" [ 1030.579759] env[63345]: _type = "Task" [ 1030.579759] 
env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.589547] env[63345]: DEBUG oslo_vmware.api [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5260dcbe-00a6-60f8-b6d3-8b23ab31d5ab, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.617975] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017729, 'name': CreateVM_Task, 'duration_secs': 0.352733} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.618274] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1030.619061] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1030.750505] env[63345]: DEBUG nova.compute.utils [None req-54db4300-37a3-4464-bcaf-1099a9ac4537 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1030.752109] env[63345]: DEBUG nova.compute.manager [None req-54db4300-37a3-4464-bcaf-1099a9ac4537 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: da3408a0-cce7-4252-be47-097f081d83c1] Not allocating networking since 'none' was specified. {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1983}} [ 1030.845252] env[63345]: DEBUG nova.compute.manager [None req-73a5a717-9b1e-41c1-9c56-b077bccf504c tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 1031.089823] env[63345]: DEBUG oslo_vmware.api [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5260dcbe-00a6-60f8-b6d3-8b23ab31d5ab, 'name': SearchDatastore_Task, 'duration_secs': 0.012699} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.090236] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1031.090506] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: 0f3f59b6-e7bc-4657-af5f-eec18efc3666] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1031.090824] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1031.091047] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1031.091303] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1031.091672] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1031.092119] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1031.092445] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-39658158-5647-4648-92a5-6dd8cf012d0b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.094338] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5bdf9fb6-aeae-43f2-98aa-953d0b7e08e1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.102482] env[63345]: DEBUG oslo_vmware.api [None 
req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Waiting for the task: (returnval){ [ 1031.102482] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52f5adfa-5755-f274-8ece-4a928cd368d9" [ 1031.102482] env[63345]: _type = "Task" [ 1031.102482] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.107290] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1031.107568] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1031.108709] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b5ca86eb-9b82-4443-91fc-db4adb2119d5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.114413] env[63345]: DEBUG oslo_vmware.api [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52f5adfa-5755-f274-8ece-4a928cd368d9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.117223] env[63345]: DEBUG oslo_vmware.api [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Waiting for the task: (returnval){ [ 1031.117223] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]521dec05-8e90-4dcb-41cf-c5e0f02c28bd" [ 1031.117223] env[63345]: _type = "Task" [ 1031.117223] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.124423] env[63345]: DEBUG oslo_vmware.api [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]521dec05-8e90-4dcb-41cf-c5e0f02c28bd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.252474] env[63345]: DEBUG nova.compute.manager [None req-54db4300-37a3-4464-bcaf-1099a9ac4537 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: da3408a0-cce7-4252-be47-097f081d83c1] Start building block device mappings for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 1031.311015] env[63345]: DEBUG nova.network.neutron [None req-2aea3361-087c-44d3-8177-a435de003f47 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Updating instance_info_cache with network_info: [{"id": "267ad158-547a-4d3a-a838-3d964626d731", "address": "fa:16:3e:9e:ba:8d", "network": {"id": "13df4553-212e-4adb-8de0-da1acdf99671", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-238696814-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.153", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4560e378b6aa47a3bbb5a2f7c5b76f5f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "90328c7b-15c4-4742-805b-755248d67029", "external-id": "nsx-vlan-transportzone-860", "segmentation_id": 860, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap267ad158-54", "ovs_interfaceid": "267ad158-547a-4d3a-a838-3d964626d731", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1031.365503] env[63345]: DEBUG oslo_concurrency.lockutils [None req-73a5a717-9b1e-41c1-9c56-b077bccf504c tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1031.478991] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4002856-9a55-40ca-9c60-a3904d8ea4d2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.487115] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-851f6b5a-2e80-4fc7-8c85-ba1a3829070c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.521362] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86011703-98fc-4fc3-ac72-e1641f77e2cd {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.529237] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-668ca760-f69d-499d-aa85-d9b59d42eee7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.543604] env[63345]: DEBUG nova.compute.provider_tree [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1031.613547] env[63345]: DEBUG oslo_vmware.api [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52f5adfa-5755-f274-8ece-4a928cd368d9, 'name': SearchDatastore_Task, 'duration_secs': 0.014069} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.613990] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1031.614342] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1031.614644] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1031.626863] env[63345]: DEBUG oslo_vmware.api [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]521dec05-8e90-4dcb-41cf-c5e0f02c28bd, 'name': SearchDatastore_Task, 'duration_secs': 0.012007} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.627633] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-13f49ec7-680e-480f-9072-b2c023fc2ba0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.632905] env[63345]: DEBUG oslo_vmware.api [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Waiting for the task: (returnval){ [ 1031.632905] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]525a45d9-cfe2-67ae-34e4-e8b0139981fb" [ 1031.632905] env[63345]: _type = "Task" [ 1031.632905] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.640713] env[63345]: DEBUG oslo_vmware.api [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]525a45d9-cfe2-67ae-34e4-e8b0139981fb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.817323] env[63345]: DEBUG oslo_concurrency.lockutils [None req-2aea3361-087c-44d3-8177-a435de003f47 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Releasing lock "refresh_cache-22a11cf9-8f85-4371-98eb-25b267c9aff7" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1031.848954] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f2f37c9e-38cd-4fb3-8e8f-ddc7c55887a7 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquiring lock "refresh_cache-dd624e54-bd5b-4660-88a1-9d6f36560421" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1031.849185] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f2f37c9e-38cd-4fb3-8e8f-ddc7c55887a7 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquired lock "refresh_cache-dd624e54-bd5b-4660-88a1-9d6f36560421" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1031.849468] env[63345]: DEBUG nova.network.neutron [None req-f2f37c9e-38cd-4fb3-8e8f-ddc7c55887a7 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1032.046633] env[63345]: DEBUG nova.scheduler.client.report [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1032.144129] env[63345]: DEBUG oslo_vmware.api [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]525a45d9-cfe2-67ae-34e4-e8b0139981fb, 'name': SearchDatastore_Task, 'duration_secs': 0.015286} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.144481] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1032.144767] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 0f3f59b6-e7bc-4657-af5f-eec18efc3666/0f3f59b6-e7bc-4657-af5f-eec18efc3666.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1032.145165] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1032.145378] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1032.145668] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e3bed06a-90c8-468c-be09-70ffa627e375 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.147885] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b50524b0-5862-411e-85da-828fe16eb8b0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.154780] env[63345]: DEBUG oslo_vmware.api [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Waiting for the task: (returnval){ [ 1032.154780] env[63345]: value = "task-1017730" [ 1032.154780] env[63345]: _type = "Task" [ 1032.154780] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.159278] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1032.159513] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Folder [datastore2] devstack-image-cache_base created. 
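
The Acquiring/Acquired/Releasing lock entries around the [datastore2] devstack-image-cache_base paths above bracket access to a shared cached VMDK so that only one request fetches or copies it at a time. Below is a rough sketch of that serialization using oslo.concurrency's lock() context manager; the exists_fn/fetch_fn helpers and the cache-check logic are placeholders for illustration, not Nova's _fetch_image_if_missing.

from oslo_concurrency import lockutils

def fetch_image_if_missing(datastore, image_id, exists_fn, fetch_fn):
    # Lock name mirrors the datastore-path style seen in the log above.
    lock_name = "[%s] devstack-image-cache_base/%s" % (datastore, image_id)
    with lockutils.lock(lock_name):
        # Only one holder of this name proceeds; others block until release.
        if not exists_fn(image_id):
            fetch_fn(image_id)
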
{{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1032.160722] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-badca7cf-9a4c-4750-9d72-9ddfe3d4e701 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.167899] env[63345]: DEBUG oslo_vmware.api [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Task: {'id': task-1017730, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.170971] env[63345]: DEBUG oslo_vmware.api [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Waiting for the task: (returnval){ [ 1032.170971] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52337782-a385-f12c-3b17-208c0dd7778c" [ 1032.170971] env[63345]: _type = "Task" [ 1032.170971] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.178138] env[63345]: DEBUG oslo_vmware.api [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52337782-a385-f12c-3b17-208c0dd7778c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.265374] env[63345]: DEBUG nova.compute.manager [None req-54db4300-37a3-4464-bcaf-1099a9ac4537 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: da3408a0-cce7-4252-be47-097f081d83c1] Start spawning the instance on the hypervisor. {{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 1032.280826] env[63345]: DEBUG nova.compute.manager [req-7900859d-507d-4404-b134-a6f92655825c req-6439a7ba-d025-4d18-9cf8-cc82a0d28bdd service nova] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Received event network-changed-8a3e5f64-f812-4c1b-a9e0-b8b3146a1467 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1032.281323] env[63345]: DEBUG nova.compute.manager [req-7900859d-507d-4404-b134-a6f92655825c req-6439a7ba-d025-4d18-9cf8-cc82a0d28bdd service nova] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Refreshing instance network info cache due to event network-changed-8a3e5f64-f812-4c1b-a9e0-b8b3146a1467. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 1032.281323] env[63345]: DEBUG oslo_concurrency.lockutils [req-7900859d-507d-4404-b134-a6f92655825c req-6439a7ba-d025-4d18-9cf8-cc82a0d28bdd service nova] Acquiring lock "refresh_cache-dd624e54-bd5b-4660-88a1-9d6f36560421" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1032.290741] env[63345]: DEBUG nova.virt.hardware [None req-54db4300-37a3-4464-bcaf-1099a9ac4537 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1032.291051] env[63345]: DEBUG nova.virt.hardware [None req-54db4300-37a3-4464-bcaf-1099a9ac4537 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1032.291341] env[63345]: DEBUG nova.virt.hardware [None req-54db4300-37a3-4464-bcaf-1099a9ac4537 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1032.291551] env[63345]: DEBUG nova.virt.hardware [None req-54db4300-37a3-4464-bcaf-1099a9ac4537 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1032.291715] env[63345]: DEBUG nova.virt.hardware [None req-54db4300-37a3-4464-bcaf-1099a9ac4537 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1032.291965] env[63345]: DEBUG nova.virt.hardware [None req-54db4300-37a3-4464-bcaf-1099a9ac4537 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1032.292216] env[63345]: DEBUG nova.virt.hardware [None req-54db4300-37a3-4464-bcaf-1099a9ac4537 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1032.292687] env[63345]: DEBUG nova.virt.hardware [None 
req-54db4300-37a3-4464-bcaf-1099a9ac4537 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1032.292916] env[63345]: DEBUG nova.virt.hardware [None req-54db4300-37a3-4464-bcaf-1099a9ac4537 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1032.293105] env[63345]: DEBUG nova.virt.hardware [None req-54db4300-37a3-4464-bcaf-1099a9ac4537 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1032.293290] env[63345]: DEBUG nova.virt.hardware [None req-54db4300-37a3-4464-bcaf-1099a9ac4537 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1032.294947] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17cca8cb-d252-40bc-bdf7-a4b74342a289 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.302422] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62fc8272-abdb-470f-abc8-143c1328ea06 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.316375] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-54db4300-37a3-4464-bcaf-1099a9ac4537 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: da3408a0-cce7-4252-be47-097f081d83c1] Instance VIF info [] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1032.322582] env[63345]: DEBUG oslo.service.loopingcall [None req-54db4300-37a3-4464-bcaf-1099a9ac4537 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1032.328137] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: da3408a0-cce7-4252-be47-097f081d83c1] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1032.328137] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-46c8745b-6d30-4336-ae11-12d56c580542 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.345808] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1032.345808] env[63345]: value = "task-1017731" [ 1032.345808] env[63345]: _type = "Task" [ 1032.345808] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.357053] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017731, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.551903] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.310s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1032.552575] env[63345]: DEBUG nova.compute.manager [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 95ef4f91-a618-4ae2-95ad-d027c031f239] Start building networks asynchronously for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 1032.555644] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a0b2c3cb-2322-4a75-926e-5ed64f87f03c tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.266s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1032.555934] env[63345]: DEBUG nova.objects.instance [None req-a0b2c3cb-2322-4a75-926e-5ed64f87f03c tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Lazy-loading 'resources' on Instance uuid 34990fa5-4a89-4430-8ea7-9e73dd41f441 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1032.668519] env[63345]: DEBUG oslo_vmware.api [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Task: {'id': task-1017730, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.684182] env[63345]: DEBUG oslo_vmware.api [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52337782-a385-f12c-3b17-208c0dd7778c, 'name': SearchDatastore_Task, 'duration_secs': 0.010547} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.688191] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-97d3b6b8-b262-4a73-86b3-02cb29622863 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.695251] env[63345]: DEBUG oslo_vmware.api [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Waiting for the task: (returnval){ [ 1032.695251] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52819e73-4325-b1e2-b5c1-3147dff1af44" [ 1032.695251] env[63345]: _type = "Task" [ 1032.695251] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.706773] env[63345]: DEBUG oslo_vmware.api [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52819e73-4325-b1e2-b5c1-3147dff1af44, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.731437] env[63345]: INFO nova.network.neutron [None req-f2f37c9e-38cd-4fb3-8e8f-ddc7c55887a7 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Port f9c53757-0ec3-4d99-9493-d12a48f28db3 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 1032.731883] env[63345]: DEBUG nova.network.neutron [None req-f2f37c9e-38cd-4fb3-8e8f-ddc7c55887a7 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Updating instance_info_cache with network_info: [{"id": "8a3e5f64-f812-4c1b-a9e0-b8b3146a1467", "address": "fa:16:3e:55:5a:7c", "network": {"id": "b360ab0d-3deb-4632-a8d5-c1639db9e9e2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2015660260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33c28bfca4da460e8ca96dc7519204c8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f35e69ef-c2c8-4b8c-9887-33e97b242c0a", "external-id": "nsx-vlan-transportzone-969", "segmentation_id": 969, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8a3e5f64-f8", "ovs_interfaceid": "8a3e5f64-f812-4c1b-a9e0-b8b3146a1467", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1032.838366] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-2aea3361-087c-44d3-8177-a435de003f47 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1032.838802] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d98cfcbe-1f3f-4e27-ae69-e4c56905ac78 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.849436] env[63345]: DEBUG oslo_vmware.api [None req-2aea3361-087c-44d3-8177-a435de003f47 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Waiting for the task: (returnval){ [ 1032.849436] env[63345]: value = "task-1017732" [ 1032.849436] env[63345]: _type = "Task" [ 1032.849436] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.864330] env[63345]: DEBUG oslo_vmware.api [None req-2aea3361-087c-44d3-8177-a435de003f47 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': task-1017732, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.868627] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017731, 'name': CreateVM_Task} progress is 99%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.060149] env[63345]: DEBUG nova.compute.utils [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1033.061807] env[63345]: DEBUG nova.compute.manager [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 95ef4f91-a618-4ae2-95ad-d027c031f239] Allocating IP information in the background. {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1033.062030] env[63345]: DEBUG nova.network.neutron [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 95ef4f91-a618-4ae2-95ad-d027c031f239] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1033.064272] env[63345]: DEBUG oslo_concurrency.lockutils [None req-952a4b25-c08e-4416-91bc-1e2edc8d77d4 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquiring lock "interface-7057cdfc-a6d9-4e52-b650-6a5709d5f8c2-f9c53757-0ec3-4d99-9493-d12a48f28db3" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1033.064537] env[63345]: DEBUG oslo_concurrency.lockutils [None req-952a4b25-c08e-4416-91bc-1e2edc8d77d4 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Lock "interface-7057cdfc-a6d9-4e52-b650-6a5709d5f8c2-f9c53757-0ec3-4d99-9493-d12a48f28db3" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1033.064871] env[63345]: DEBUG nova.objects.instance [None req-952a4b25-c08e-4416-91bc-1e2edc8d77d4 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Lazy-loading 'flavor' on Instance uuid 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1033.120601] env[63345]: DEBUG nova.policy [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '738e7097762c42d490a66c3d86af9635', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '41afa63287424a549133615eb390bac7', 
'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 1033.168625] env[63345]: DEBUG oslo_vmware.api [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Task: {'id': task-1017730, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.596208} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.168909] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 0f3f59b6-e7bc-4657-af5f-eec18efc3666/0f3f59b6-e7bc-4657-af5f-eec18efc3666.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1033.169155] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: 0f3f59b6-e7bc-4657-af5f-eec18efc3666] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1033.169427] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5aacfa40-904d-4e6f-ab5e-4b868ad9a567 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.176723] env[63345]: DEBUG oslo_vmware.api [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Waiting for the task: (returnval){ [ 1033.176723] env[63345]: value = "task-1017733" [ 1033.176723] env[63345]: _type = "Task" [ 1033.176723] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.187047] env[63345]: DEBUG oslo_vmware.api [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Task: {'id': task-1017733, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.208606] env[63345]: DEBUG oslo_vmware.api [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52819e73-4325-b1e2-b5c1-3147dff1af44, 'name': SearchDatastore_Task, 'duration_secs': 0.041853} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.209009] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1033.209301] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 95738bee-d291-4f27-aeff-9445939bb3fa/95738bee-d291-4f27-aeff-9445939bb3fa.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1033.209577] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8fb3d0a2-afe9-4637-b0cc-2c2fdd5c6761 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.219216] env[63345]: DEBUG oslo_vmware.api [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Waiting for the task: (returnval){ [ 1033.219216] env[63345]: value = "task-1017734" [ 1033.219216] env[63345]: _type = "Task" [ 1033.219216] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.228189] env[63345]: DEBUG oslo_vmware.api [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017734, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.234945] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f2f37c9e-38cd-4fb3-8e8f-ddc7c55887a7 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Releasing lock "refresh_cache-dd624e54-bd5b-4660-88a1-9d6f36560421" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1033.238372] env[63345]: DEBUG oslo_concurrency.lockutils [req-7900859d-507d-4404-b134-a6f92655825c req-6439a7ba-d025-4d18-9cf8-cc82a0d28bdd service nova] Acquired lock "refresh_cache-dd624e54-bd5b-4660-88a1-9d6f36560421" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1033.238582] env[63345]: DEBUG nova.network.neutron [req-7900859d-507d-4404-b134-a6f92655825c req-6439a7ba-d025-4d18-9cf8-cc82a0d28bdd service nova] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Refreshing network info cache for port 8a3e5f64-f812-4c1b-a9e0-b8b3146a1467 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1033.264132] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e87a281-fa24-4260-886b-0c103e962b13 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.274175] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f7ddd92-5442-42c5-8659-0d7b40600c2b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.305378] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a0d10a3-1545-4121-abcc-1da9160d1959 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.312597] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbd2f77d-33c2-4d08-9524-d51694b95edd {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.327586] env[63345]: DEBUG nova.compute.provider_tree [None req-a0b2c3cb-2322-4a75-926e-5ed64f87f03c tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1033.358901] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017731, 'name': CreateVM_Task} progress is 99%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.364165] env[63345]: DEBUG oslo_vmware.api [None req-2aea3361-087c-44d3-8177-a435de003f47 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': task-1017732, 'name': PowerOnVM_Task, 'duration_secs': 0.436884} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.364465] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-2aea3361-087c-44d3-8177-a435de003f47 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1033.364707] env[63345]: DEBUG nova.compute.manager [None req-2aea3361-087c-44d3-8177-a435de003f47 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1033.365512] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9d070ed-2186-4a92-bb2a-81f48799598b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.458273] env[63345]: DEBUG nova.network.neutron [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 95ef4f91-a618-4ae2-95ad-d027c031f239] Successfully created port: e65d10a2-1bac-4be4-846a-6fc94207c2b7 {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1033.565479] env[63345]: DEBUG nova.compute.manager [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 95ef4f91-a618-4ae2-95ad-d027c031f239] Start building block device mappings for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 1033.675877] env[63345]: DEBUG nova.objects.instance [None req-952a4b25-c08e-4416-91bc-1e2edc8d77d4 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Lazy-loading 'pci_requests' on Instance uuid 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1033.688247] env[63345]: DEBUG oslo_vmware.api [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Task: {'id': task-1017733, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066542} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.689163] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: 0f3f59b6-e7bc-4657-af5f-eec18efc3666] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1033.690079] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-988c2423-5263-4ab3-b9ac-369a0f3521ff {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.712425] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: 0f3f59b6-e7bc-4657-af5f-eec18efc3666] Reconfiguring VM instance instance-00000068 to attach disk [datastore2] 0f3f59b6-e7bc-4657-af5f-eec18efc3666/0f3f59b6-e7bc-4657-af5f-eec18efc3666.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1033.713182] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2516bba4-f3be-4818-b88f-22b41fa11dc2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.742888] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f2f37c9e-38cd-4fb3-8e8f-ddc7c55887a7 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Lock "interface-dd624e54-bd5b-4660-88a1-9d6f36560421-f9c53757-0ec3-4d99-9493-d12a48f28db3" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.374s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1033.748482] env[63345]: DEBUG oslo_vmware.api [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017734, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.750361] env[63345]: DEBUG oslo_vmware.api [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Waiting for the task: (returnval){ [ 1033.750361] env[63345]: value = "task-1017735" [ 1033.750361] env[63345]: _type = "Task" [ 1033.750361] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.759924] env[63345]: DEBUG oslo_vmware.api [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Task: {'id': task-1017735, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.854016] env[63345]: ERROR nova.scheduler.client.report [None req-a0b2c3cb-2322-4a75-926e-5ed64f87f03c tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] [req-c410b104-812f-415f-88e6-728678ee0822] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID fc35ddde-c15e-4ab8-bf77-a06ae0805b57. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-c410b104-812f-415f-88e6-728678ee0822"}]} [ 1033.864571] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017731, 'name': CreateVM_Task} progress is 99%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.878816] env[63345]: DEBUG nova.scheduler.client.report [None req-a0b2c3cb-2322-4a75-926e-5ed64f87f03c tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Refreshing inventories for resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1033.900901] env[63345]: DEBUG nova.scheduler.client.report [None req-a0b2c3cb-2322-4a75-926e-5ed64f87f03c tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Updating ProviderTree inventory for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1033.901253] env[63345]: DEBUG nova.compute.provider_tree [None req-a0b2c3cb-2322-4a75-926e-5ed64f87f03c tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1033.913973] env[63345]: DEBUG nova.scheduler.client.report [None req-a0b2c3cb-2322-4a75-926e-5ed64f87f03c tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Refreshing aggregate associations for resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57, aggregates: None {{(pid=63345) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1033.941555] env[63345]: 
DEBUG nova.scheduler.client.report [None req-a0b2c3cb-2322-4a75-926e-5ed64f87f03c tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Refreshing trait associations for resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=63345) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1034.007137] env[63345]: DEBUG nova.network.neutron [req-7900859d-507d-4404-b134-a6f92655825c req-6439a7ba-d025-4d18-9cf8-cc82a0d28bdd service nova] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Updated VIF entry in instance network info cache for port 8a3e5f64-f812-4c1b-a9e0-b8b3146a1467. {{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1034.007543] env[63345]: DEBUG nova.network.neutron [req-7900859d-507d-4404-b134-a6f92655825c req-6439a7ba-d025-4d18-9cf8-cc82a0d28bdd service nova] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Updating instance_info_cache with network_info: [{"id": "8a3e5f64-f812-4c1b-a9e0-b8b3146a1467", "address": "fa:16:3e:55:5a:7c", "network": {"id": "b360ab0d-3deb-4632-a8d5-c1639db9e9e2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2015660260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33c28bfca4da460e8ca96dc7519204c8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f35e69ef-c2c8-4b8c-9887-33e97b242c0a", "external-id": "nsx-vlan-transportzone-969", "segmentation_id": 969, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8a3e5f64-f8", "ovs_interfaceid": "8a3e5f64-f812-4c1b-a9e0-b8b3146a1467", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1034.171890] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85bb9da2-2609-4d4f-8504-0e9bdef43080 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.181569] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f205d373-3bce-4eb6-8d4d-e67387a4de64 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.185390] env[63345]: DEBUG nova.objects.base [None req-952a4b25-c08e-4416-91bc-1e2edc8d77d4 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Object Instance<7057cdfc-a6d9-4e52-b650-6a5709d5f8c2> lazy-loaded attributes: flavor,pci_requests {{(pid=63345) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 1034.185605] env[63345]: DEBUG nova.network.neutron [None req-952a4b25-c08e-4416-91bc-1e2edc8d77d4 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] 
allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1034.218705] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4420774-1c88-45d6-8394-e7403b82f08a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.226999] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c8d31ce-b44c-4b7f-bc22-e94007d18246 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.244398] env[63345]: DEBUG nova.compute.provider_tree [None req-a0b2c3cb-2322-4a75-926e-5ed64f87f03c tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1034.252512] env[63345]: DEBUG oslo_vmware.api [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017734, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.262264] env[63345]: DEBUG oslo_vmware.api [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Task: {'id': task-1017735, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.265151] env[63345]: DEBUG nova.policy [None req-952a4b25-c08e-4416-91bc-1e2edc8d77d4 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e36fd04030444217acadbbf4e4fe9be0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '33c28bfca4da460e8ca96dc7519204c8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 1034.360082] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017731, 'name': CreateVM_Task, 'duration_secs': 1.64791} completed successfully. 
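The CreateVM_Task, CopyVirtualDisk_Task, ReconfigVM_Task and SearchDatastore_Task records in this stretch all follow the same wait_for_task / _poll_task pattern: the vCenter task is polled, its progress percentage is logged, and completion is reported with a duration_secs value. Below is a minimal poll-until-done sketch of that pattern; fetch_task_info is a hypothetical callable standing in for a vSphere TaskInfo lookup, and this is an illustration, not the oslo.vmware implementation.

# Generic poll loop in the spirit of the wait_for_task / _poll_task records
# above; fetch_task_info() is a hypothetical stand-in returning a dict with
# 'state', 'progress', 'error' and 'result' keys.
import time

class TaskFailed(Exception):
    pass

def wait_for_task(fetch_task_info, interval=0.5, timeout=300):
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = fetch_task_info()
        if info["state"] == "success":
            return info.get("result")
        if info["state"] == "error":
            raise TaskFailed(info.get("error"))
        # 'queued' or 'running': report progress and poll again, as the log does.
        print(f"progress is {info.get('progress', 0)}%")
        time.sleep(interval)
    raise TimeoutError("task did not complete in time")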
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.360288] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: da3408a0-cce7-4252-be47-097f081d83c1] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1034.360779] env[63345]: DEBUG oslo_concurrency.lockutils [None req-54db4300-37a3-4464-bcaf-1099a9ac4537 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1034.360954] env[63345]: DEBUG oslo_concurrency.lockutils [None req-54db4300-37a3-4464-bcaf-1099a9ac4537 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1034.361318] env[63345]: DEBUG oslo_concurrency.lockutils [None req-54db4300-37a3-4464-bcaf-1099a9ac4537 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1034.361594] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f7792fe5-1117-4f53-a527-1b0eef124166 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.366637] env[63345]: DEBUG oslo_vmware.api [None req-54db4300-37a3-4464-bcaf-1099a9ac4537 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Waiting for the task: (returnval){ [ 1034.366637] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]522fbc20-9e13-9293-9fda-0700ff56c656" [ 1034.366637] env[63345]: _type = "Task" [ 1034.366637] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.376455] env[63345]: DEBUG oslo_vmware.api [None req-54db4300-37a3-4464-bcaf-1099a9ac4537 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]522fbc20-9e13-9293-9fda-0700ff56c656, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.510606] env[63345]: DEBUG oslo_concurrency.lockutils [req-7900859d-507d-4404-b134-a6f92655825c req-6439a7ba-d025-4d18-9cf8-cc82a0d28bdd service nova] Releasing lock "refresh_cache-dd624e54-bd5b-4660-88a1-9d6f36560421" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1034.576361] env[63345]: DEBUG nova.compute.manager [req-1da35930-716b-4a00-8473-6552b1318310 req-f657cd98-22e1-4b4b-bb91-341641aab824 service nova] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Received event network-changed-d6e5e759-86e1-4f76-9b65-19b2691780df {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1034.576677] env[63345]: DEBUG nova.compute.manager [req-1da35930-716b-4a00-8473-6552b1318310 req-f657cd98-22e1-4b4b-bb91-341641aab824 service nova] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Refreshing instance network info cache due to event network-changed-d6e5e759-86e1-4f76-9b65-19b2691780df. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 1034.576942] env[63345]: DEBUG oslo_concurrency.lockutils [req-1da35930-716b-4a00-8473-6552b1318310 req-f657cd98-22e1-4b4b-bb91-341641aab824 service nova] Acquiring lock "refresh_cache-7057cdfc-a6d9-4e52-b650-6a5709d5f8c2" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1034.577125] env[63345]: DEBUG oslo_concurrency.lockutils [req-1da35930-716b-4a00-8473-6552b1318310 req-f657cd98-22e1-4b4b-bb91-341641aab824 service nova] Acquired lock "refresh_cache-7057cdfc-a6d9-4e52-b650-6a5709d5f8c2" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1034.577404] env[63345]: DEBUG nova.network.neutron [req-1da35930-716b-4a00-8473-6552b1318310 req-f657cd98-22e1-4b4b-bb91-341641aab824 service nova] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Refreshing network info cache for port d6e5e759-86e1-4f76-9b65-19b2691780df {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1034.586325] env[63345]: DEBUG nova.compute.manager [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 95ef4f91-a618-4ae2-95ad-d027c031f239] Start spawning the instance on the hypervisor. 
{{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 1034.612697] env[63345]: DEBUG nova.virt.hardware [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1034.613121] env[63345]: DEBUG nova.virt.hardware [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1034.613353] env[63345]: DEBUG nova.virt.hardware [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1034.613655] env[63345]: DEBUG nova.virt.hardware [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1034.614172] env[63345]: DEBUG nova.virt.hardware [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1034.614172] env[63345]: DEBUG nova.virt.hardware [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1034.614326] env[63345]: DEBUG nova.virt.hardware [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1034.614453] env[63345]: DEBUG nova.virt.hardware [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1034.614815] env[63345]: DEBUG 
nova.virt.hardware [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1034.614923] env[63345]: DEBUG nova.virt.hardware [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1034.615160] env[63345]: DEBUG nova.virt.hardware [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1034.616340] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1aab3d82-bdf6-4919-a351-1387f31d3df1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.625102] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23703ed0-0262-42a6-9437-48ad9dd89327 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.743601] env[63345]: DEBUG oslo_vmware.api [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017734, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.132967} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.743955] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 95738bee-d291-4f27-aeff-9445939bb3fa/95738bee-d291-4f27-aeff-9445939bb3fa.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1034.744091] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1034.744415] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fb59a502-d303-4159-ac81-8b8b0a4a1d95 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.751156] env[63345]: DEBUG oslo_vmware.api [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Waiting for the task: (returnval){ [ 1034.751156] env[63345]: value = "task-1017736" [ 1034.751156] env[63345]: _type = "Task" [ 1034.751156] env[63345]: } to complete. 
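The nova.virt.hardware records above work out the candidate CPU topologies for the m1.nano flavor: with neither flavor nor image constraining the topology, the limits default to 65536 sockets/cores/threads, and for a single vCPU the only candidate is cores=1, sockets=1, threads=1. A rough sketch of that enumeration is shown below, under the assumption that a candidate topology is any (sockets, cores, threads) triple whose product equals the vCPU count within the limits; this is an illustration, not Nova's _get_possible_cpu_topologies.

# Illustrative enumeration of candidate CPU topologies; not Nova's code.
def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    candidates = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            for threads in range(1, min(vcpus, max_threads) + 1):
                if sockets * cores * threads == vcpus:
                    candidates.append((sockets, cores, threads))
    return candidates

print(possible_topologies(1))   # [(1, 1, 1)], matching the log for 1 vCPU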
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.769840] env[63345]: DEBUG oslo_vmware.api [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Task: {'id': task-1017735, 'name': ReconfigVM_Task, 'duration_secs': 0.632322} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.772436] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: 0f3f59b6-e7bc-4657-af5f-eec18efc3666] Reconfigured VM instance instance-00000068 to attach disk [datastore2] 0f3f59b6-e7bc-4657-af5f-eec18efc3666/0f3f59b6-e7bc-4657-af5f-eec18efc3666.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1034.773103] env[63345]: DEBUG oslo_vmware.api [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017736, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.773339] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d01a0154-6b21-4961-9afd-e781fa1eedb9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.779855] env[63345]: DEBUG oslo_vmware.api [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Waiting for the task: (returnval){ [ 1034.779855] env[63345]: value = "task-1017737" [ 1034.779855] env[63345]: _type = "Task" [ 1034.779855] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.788287] env[63345]: DEBUG oslo_vmware.api [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Task: {'id': task-1017737, 'name': Rename_Task} progress is 5%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.789186] env[63345]: DEBUG nova.scheduler.client.report [None req-a0b2c3cb-2322-4a75-926e-5ed64f87f03c tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Updated inventory for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with generation 142 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1034.789790] env[63345]: DEBUG nova.compute.provider_tree [None req-a0b2c3cb-2322-4a75-926e-5ed64f87f03c tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Updating resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 generation from 142 to 143 during operation: update_inventory {{(pid=63345) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1034.789790] env[63345]: DEBUG nova.compute.provider_tree [None req-a0b2c3cb-2322-4a75-926e-5ed64f87f03c tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1034.878403] env[63345]: DEBUG oslo_vmware.api [None req-54db4300-37a3-4464-bcaf-1099a9ac4537 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]522fbc20-9e13-9293-9fda-0700ff56c656, 'name': SearchDatastore_Task, 'duration_secs': 0.011889} completed successfully. 
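The 409 placement.concurrent_update error earlier in this section, followed by the inventory/aggregate/trait refresh and the eventual "Updated inventory ... with generation 142" record with the local generation bumped to 143, is Placement's optimistic concurrency control at work: a writer must send the resource provider generation it last saw, and a mismatch forces a re-read and a retry. Below is a minimal sketch of that pattern against the Placement REST API, assuming the requests library and placeholder endpoint/token values; it is not Nova's scheduler report client.

# Generation-checked inventory update against the Placement API.
# PLACEMENT_URL and the token are placeholders, not Nova configuration.
import requests

PLACEMENT_URL = "http://placement.example:8778"
HEADERS = {"X-Auth-Token": "TOKEN", "OpenStack-API-Version": "placement 1.26"}

def get_provider_generation(rp_uuid):
    # The GET response carries the provider's current generation.
    r = requests.get(f"{PLACEMENT_URL}/resource_providers/{rp_uuid}/inventories",
                     headers=HEADERS)
    r.raise_for_status()
    return r.json()["resource_provider_generation"]

def put_inventory(rp_uuid, inventories, retries=3):
    # Placement only accepts the PUT if the generation we send matches the
    # current one; a concurrent writer bumps it and we get 409, so refresh
    # and retry, which is the behaviour visible in the log above.
    for _ in range(retries):
        body = {"resource_provider_generation": get_provider_generation(rp_uuid),
                "inventories": inventories}
        r = requests.put(f"{PLACEMENT_URL}/resource_providers/{rp_uuid}/inventories",
                         headers=HEADERS, json=body)
        if r.status_code != 409:
            r.raise_for_status()
            return r.json()
    raise RuntimeError("inventory update still conflicting after retries")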
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.878755] env[63345]: DEBUG oslo_concurrency.lockutils [None req-54db4300-37a3-4464-bcaf-1099a9ac4537 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1034.879021] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-54db4300-37a3-4464-bcaf-1099a9ac4537 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: da3408a0-cce7-4252-be47-097f081d83c1] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1034.879297] env[63345]: DEBUG oslo_concurrency.lockutils [None req-54db4300-37a3-4464-bcaf-1099a9ac4537 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1034.879440] env[63345]: DEBUG oslo_concurrency.lockutils [None req-54db4300-37a3-4464-bcaf-1099a9ac4537 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1034.879626] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-54db4300-37a3-4464-bcaf-1099a9ac4537 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1034.879906] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-684ded9b-78ea-4f06-bab9-75f92a45fa13 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.887746] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-54db4300-37a3-4464-bcaf-1099a9ac4537 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1034.887976] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-54db4300-37a3-4464-bcaf-1099a9ac4537 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1034.888774] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-65268780-fdf1-4f50-81d9-9a1b4efe84b1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.895284] env[63345]: DEBUG oslo_vmware.api [None req-54db4300-37a3-4464-bcaf-1099a9ac4537 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Waiting for the task: (returnval){ [ 1034.895284] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52e77baa-2d55-5efb-62da-57a26901b1bd" [ 1034.895284] env[63345]: _type = "Task" [ 1034.895284] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.902158] env[63345]: DEBUG oslo_vmware.api [None req-54db4300-37a3-4464-bcaf-1099a9ac4537 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52e77baa-2d55-5efb-62da-57a26901b1bd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.261188] env[63345]: DEBUG oslo_vmware.api [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017736, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.10604} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.261598] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1035.262444] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60d33d47-ab66-476d-927b-886832cf6a36 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.286017] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Reconfiguring VM instance instance-00000067 to attach disk [datastore2] 95738bee-d291-4f27-aeff-9445939bb3fa/95738bee-d291-4f27-aeff-9445939bb3fa.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1035.288814] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3fd558a1-6ead-4a34-9ea7-c0327b9ac2fd {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.309057] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a0b2c3cb-2322-4a75-926e-5ed64f87f03c tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.753s {{(pid=63345) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1035.311446] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f6699b30-427e-475c-bf5f-171539ce6309 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.107s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1035.311683] env[63345]: DEBUG nova.objects.instance [None req-f6699b30-427e-475c-bf5f-171539ce6309 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lazy-loading 'resources' on Instance uuid a7d80763-92f0-45a9-b24b-1f973bffb376 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1035.318063] env[63345]: DEBUG oslo_vmware.api [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Task: {'id': task-1017737, 'name': Rename_Task, 'duration_secs': 0.191899} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.319463] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: 0f3f59b6-e7bc-4657-af5f-eec18efc3666] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1035.319923] env[63345]: DEBUG oslo_vmware.api [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Waiting for the task: (returnval){ [ 1035.319923] env[63345]: value = "task-1017738" [ 1035.319923] env[63345]: _type = "Task" [ 1035.319923] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.320728] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-98be8429-9b6f-4d7e-923e-9b28aad11bcd {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.334128] env[63345]: DEBUG oslo_vmware.api [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017738, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.335888] env[63345]: DEBUG oslo_vmware.api [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Waiting for the task: (returnval){ [ 1035.335888] env[63345]: value = "task-1017739" [ 1035.335888] env[63345]: _type = "Task" [ 1035.335888] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.346682] env[63345]: INFO nova.scheduler.client.report [None req-a0b2c3cb-2322-4a75-926e-5ed64f87f03c tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Deleted allocations for instance 34990fa5-4a89-4430-8ea7-9e73dd41f441 [ 1035.406730] env[63345]: DEBUG oslo_vmware.api [None req-54db4300-37a3-4464-bcaf-1099a9ac4537 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52e77baa-2d55-5efb-62da-57a26901b1bd, 'name': SearchDatastore_Task, 'duration_secs': 0.012354} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.409205] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3c331016-e692-4849-89ba-02847af06fba {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.414608] env[63345]: DEBUG oslo_vmware.api [None req-54db4300-37a3-4464-bcaf-1099a9ac4537 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Waiting for the task: (returnval){ [ 1035.414608] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52e7fec9-fe83-d3ff-e280-5dd776499bf9" [ 1035.414608] env[63345]: _type = "Task" [ 1035.414608] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.424473] env[63345]: DEBUG oslo_vmware.api [None req-54db4300-37a3-4464-bcaf-1099a9ac4537 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52e7fec9-fe83-d3ff-e280-5dd776499bf9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.588638] env[63345]: DEBUG nova.network.neutron [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 95ef4f91-a618-4ae2-95ad-d027c031f239] Successfully updated port: e65d10a2-1bac-4be4-846a-6fc94207c2b7 {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1035.602481] env[63345]: DEBUG nova.network.neutron [req-1da35930-716b-4a00-8473-6552b1318310 req-f657cd98-22e1-4b4b-bb91-341641aab824 service nova] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Updated VIF entry in instance network info cache for port d6e5e759-86e1-4f76-9b65-19b2691780df. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1035.603200] env[63345]: DEBUG nova.network.neutron [req-1da35930-716b-4a00-8473-6552b1318310 req-f657cd98-22e1-4b4b-bb91-341641aab824 service nova] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Updating instance_info_cache with network_info: [{"id": "d6e5e759-86e1-4f76-9b65-19b2691780df", "address": "fa:16:3e:21:49:6e", "network": {"id": "b360ab0d-3deb-4632-a8d5-c1639db9e9e2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2015660260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.225", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33c28bfca4da460e8ca96dc7519204c8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f35e69ef-c2c8-4b8c-9887-33e97b242c0a", "external-id": "nsx-vlan-transportzone-969", "segmentation_id": 969, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd6e5e759-86", "ovs_interfaceid": "d6e5e759-86e1-4f76-9b65-19b2691780df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1035.805355] env[63345]: DEBUG nova.network.neutron [None req-952a4b25-c08e-4416-91bc-1e2edc8d77d4 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Successfully updated port: f9c53757-0ec3-4d99-9493-d12a48f28db3 {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1035.814541] env[63345]: DEBUG nova.compute.manager [req-9305191a-fb14-42b4-9f98-38565e8bcf3d req-48ce21bb-a062-416b-b3a7-1dfb16409fe0 service nova] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Received event network-vif-plugged-f9c53757-0ec3-4d99-9493-d12a48f28db3 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1035.814735] env[63345]: DEBUG oslo_concurrency.lockutils [req-9305191a-fb14-42b4-9f98-38565e8bcf3d req-48ce21bb-a062-416b-b3a7-1dfb16409fe0 service nova] Acquiring lock "7057cdfc-a6d9-4e52-b650-6a5709d5f8c2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1035.814963] env[63345]: DEBUG oslo_concurrency.lockutils [req-9305191a-fb14-42b4-9f98-38565e8bcf3d req-48ce21bb-a062-416b-b3a7-1dfb16409fe0 service nova] Lock "7057cdfc-a6d9-4e52-b650-6a5709d5f8c2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1035.815164] env[63345]: DEBUG oslo_concurrency.lockutils [req-9305191a-fb14-42b4-9f98-38565e8bcf3d req-48ce21bb-a062-416b-b3a7-1dfb16409fe0 service nova] Lock "7057cdfc-a6d9-4e52-b650-6a5709d5f8c2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s 
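The instance_info_cache update above carries the full Neutron-derived network_info for the port, including the fixed address 192.168.128.13 and its floating IP 10.180.180.225. When working with such cached blobs directly, the addresses can be pulled out with a few lines of Python; the snippet below only walks the structure visible in the log and is not Nova's network model code.

# Walk a cached network_info list (shaped like the one logged above) and
# collect the fixed and floating IPv4 addresses per VIF.
def collect_addresses(network_info):
    result = {}
    for vif in network_info:
        fixed, floating = [], []
        for subnet in vif.get("network", {}).get("subnets", []):
            for ip in subnet.get("ips", []):
                fixed.append(ip["address"])
                floating.extend(f["address"] for f in ip.get("floating_ips", []))
        result[vif["id"]] = {"mac": vif.get("address"),
                             "fixed": fixed, "floating": floating}
    return result

# Example using the VIF from the log record above:
cache = [{"id": "d6e5e759-86e1-4f76-9b65-19b2691780df",
          "address": "fa:16:3e:21:49:6e",
          "network": {"subnets": [{"ips": [{"address": "192.168.128.13",
                                            "floating_ips": [{"address": "10.180.180.225"}]}]}]}}]
print(collect_addresses(cache))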
{{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1035.815351] env[63345]: DEBUG nova.compute.manager [req-9305191a-fb14-42b4-9f98-38565e8bcf3d req-48ce21bb-a062-416b-b3a7-1dfb16409fe0 service nova] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] No waiting events found dispatching network-vif-plugged-f9c53757-0ec3-4d99-9493-d12a48f28db3 {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1035.815514] env[63345]: WARNING nova.compute.manager [req-9305191a-fb14-42b4-9f98-38565e8bcf3d req-48ce21bb-a062-416b-b3a7-1dfb16409fe0 service nova] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Received unexpected event network-vif-plugged-f9c53757-0ec3-4d99-9493-d12a48f28db3 for instance with vm_state active and task_state None. [ 1035.831467] env[63345]: DEBUG oslo_vmware.api [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017738, 'name': ReconfigVM_Task, 'duration_secs': 0.459299} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.831773] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Reconfigured VM instance instance-00000067 to attach disk [datastore2] 95738bee-d291-4f27-aeff-9445939bb3fa/95738bee-d291-4f27-aeff-9445939bb3fa.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1035.832431] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c3d6112a-a9be-489a-bbad-e56c7865be97 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.843972] env[63345]: DEBUG oslo_vmware.api [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Waiting for the task: (returnval){ [ 1035.843972] env[63345]: value = "task-1017740" [ 1035.843972] env[63345]: _type = "Task" [ 1035.843972] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.850856] env[63345]: DEBUG oslo_vmware.api [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Task: {'id': task-1017739, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.858106] env[63345]: DEBUG oslo_vmware.api [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017740, 'name': Rename_Task} progress is 10%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.858632] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a0b2c3cb-2322-4a75-926e-5ed64f87f03c tempest-ServersTestJSON-216022561 tempest-ServersTestJSON-216022561-project-member] Lock "34990fa5-4a89-4430-8ea7-9e73dd41f441" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.457s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1035.924174] env[63345]: DEBUG oslo_vmware.api [None req-54db4300-37a3-4464-bcaf-1099a9ac4537 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52e7fec9-fe83-d3ff-e280-5dd776499bf9, 'name': SearchDatastore_Task, 'duration_secs': 0.012027} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.927035] env[63345]: DEBUG oslo_concurrency.lockutils [None req-54db4300-37a3-4464-bcaf-1099a9ac4537 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1035.927347] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-54db4300-37a3-4464-bcaf-1099a9ac4537 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] da3408a0-cce7-4252-be47-097f081d83c1/da3408a0-cce7-4252-be47-097f081d83c1.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1035.927828] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-625f321e-f611-44b9-9c10-bad70c8eec52 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.935102] env[63345]: DEBUG oslo_vmware.api [None req-54db4300-37a3-4464-bcaf-1099a9ac4537 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Waiting for the task: (returnval){ [ 1035.935102] env[63345]: value = "task-1017741" [ 1035.935102] env[63345]: _type = "Task" [ 1035.935102] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.945605] env[63345]: DEBUG oslo_vmware.api [None req-54db4300-37a3-4464-bcaf-1099a9ac4537 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Task: {'id': task-1017741, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.000880] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-383a5efc-94f4-4036-ad46-ad47a881e8d8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.010584] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05ad2e88-b710-4d28-8796-aa60c3cb4c36 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.041509] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b65b7c0-54c3-4eb0-b6c7-e3f67bc6eb12 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.049547] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-beec938f-745d-48b9-b510-c09c3c6ac431 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.062852] env[63345]: DEBUG nova.compute.provider_tree [None req-f6699b30-427e-475c-bf5f-171539ce6309 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1036.092342] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Acquiring lock "refresh_cache-95ef4f91-a618-4ae2-95ad-d027c031f239" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1036.092622] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Acquired lock "refresh_cache-95ef4f91-a618-4ae2-95ad-d027c031f239" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1036.092720] env[63345]: DEBUG nova.network.neutron [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 95ef4f91-a618-4ae2-95ad-d027c031f239] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1036.105907] env[63345]: DEBUG oslo_concurrency.lockutils [req-1da35930-716b-4a00-8473-6552b1318310 req-f657cd98-22e1-4b4b-bb91-341641aab824 service nova] Releasing lock "refresh_cache-7057cdfc-a6d9-4e52-b650-6a5709d5f8c2" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1036.308981] env[63345]: DEBUG oslo_concurrency.lockutils [None req-952a4b25-c08e-4416-91bc-1e2edc8d77d4 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquiring lock "refresh_cache-7057cdfc-a6d9-4e52-b650-6a5709d5f8c2" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1036.309200] env[63345]: DEBUG oslo_concurrency.lockutils [None req-952a4b25-c08e-4416-91bc-1e2edc8d77d4 
tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquired lock "refresh_cache-7057cdfc-a6d9-4e52-b650-6a5709d5f8c2" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1036.309398] env[63345]: DEBUG nova.network.neutron [None req-952a4b25-c08e-4416-91bc-1e2edc8d77d4 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1036.350832] env[63345]: DEBUG oslo_vmware.api [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Task: {'id': task-1017739, 'name': PowerOnVM_Task, 'duration_secs': 0.868217} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.351560] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: 0f3f59b6-e7bc-4657-af5f-eec18efc3666] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1036.351783] env[63345]: INFO nova.compute.manager [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: 0f3f59b6-e7bc-4657-af5f-eec18efc3666] Took 6.39 seconds to spawn the instance on the hypervisor. [ 1036.351991] env[63345]: DEBUG nova.compute.manager [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: 0f3f59b6-e7bc-4657-af5f-eec18efc3666] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1036.353166] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e6268a9-4ddf-4f2a-a391-f90c0e9cb19e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.359043] env[63345]: DEBUG oslo_vmware.api [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017740, 'name': Rename_Task, 'duration_secs': 0.193736} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.359732] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1036.360044] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-10e6ebc9-278f-406a-84ba-454b4537e511 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.371118] env[63345]: DEBUG oslo_vmware.api [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Waiting for the task: (returnval){ [ 1036.371118] env[63345]: value = "task-1017742" [ 1036.371118] env[63345]: _type = "Task" [ 1036.371118] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.379586] env[63345]: DEBUG oslo_vmware.api [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017742, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.447235] env[63345]: DEBUG oslo_vmware.api [None req-54db4300-37a3-4464-bcaf-1099a9ac4537 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Task: {'id': task-1017741, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.566149] env[63345]: DEBUG nova.scheduler.client.report [None req-f6699b30-427e-475c-bf5f-171539ce6309 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1036.638856] env[63345]: DEBUG nova.network.neutron [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 95ef4f91-a618-4ae2-95ad-d027c031f239] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1036.750479] env[63345]: DEBUG nova.compute.manager [req-83102e3a-e949-4099-83d0-a8c7beb1b25c req-114ffe8d-45dc-4706-8e85-e24694c5e173 service nova] [instance: 95ef4f91-a618-4ae2-95ad-d027c031f239] Received event network-vif-plugged-e65d10a2-1bac-4be4-846a-6fc94207c2b7 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1036.751444] env[63345]: DEBUG oslo_concurrency.lockutils [req-83102e3a-e949-4099-83d0-a8c7beb1b25c req-114ffe8d-45dc-4706-8e85-e24694c5e173 service nova] Acquiring lock "95ef4f91-a618-4ae2-95ad-d027c031f239-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1036.751873] env[63345]: DEBUG oslo_concurrency.lockutils [req-83102e3a-e949-4099-83d0-a8c7beb1b25c req-114ffe8d-45dc-4706-8e85-e24694c5e173 service nova] Lock "95ef4f91-a618-4ae2-95ad-d027c031f239-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1036.752272] env[63345]: DEBUG oslo_concurrency.lockutils [req-83102e3a-e949-4099-83d0-a8c7beb1b25c req-114ffe8d-45dc-4706-8e85-e24694c5e173 service nova] Lock "95ef4f91-a618-4ae2-95ad-d027c031f239-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1036.752752] env[63345]: DEBUG nova.compute.manager [req-83102e3a-e949-4099-83d0-a8c7beb1b25c req-114ffe8d-45dc-4706-8e85-e24694c5e173 service nova] [instance: 95ef4f91-a618-4ae2-95ad-d027c031f239] No waiting events found dispatching network-vif-plugged-e65d10a2-1bac-4be4-846a-6fc94207c2b7 {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1036.753243] env[63345]: WARNING nova.compute.manager [req-83102e3a-e949-4099-83d0-a8c7beb1b25c req-114ffe8d-45dc-4706-8e85-e24694c5e173 service nova] [instance: 95ef4f91-a618-4ae2-95ad-d027c031f239] Received unexpected event network-vif-plugged-e65d10a2-1bac-4be4-846a-6fc94207c2b7 for instance with vm_state building and task_state spawning. [ 1036.753458] env[63345]: DEBUG nova.compute.manager [req-83102e3a-e949-4099-83d0-a8c7beb1b25c req-114ffe8d-45dc-4706-8e85-e24694c5e173 service nova] [instance: 95ef4f91-a618-4ae2-95ad-d027c031f239] Received event network-changed-e65d10a2-1bac-4be4-846a-6fc94207c2b7 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1036.753761] env[63345]: DEBUG nova.compute.manager [req-83102e3a-e949-4099-83d0-a8c7beb1b25c req-114ffe8d-45dc-4706-8e85-e24694c5e173 service nova] [instance: 95ef4f91-a618-4ae2-95ad-d027c031f239] Refreshing instance network info cache due to event network-changed-e65d10a2-1bac-4be4-846a-6fc94207c2b7. 
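The "Received event network-vif-plugged-..." / "No waiting events found dispatching ..." pairs above come from Nova's external event mechanism: the compute manager registers an expected event for an instance, Neutron reports the event through the API, and the waiter is woken; if nothing was registered for it, the event is logged as unexpected, exactly as seen here for the instance that is still building. Below is a toy registry with the same shape, built on threading.Event; it is illustrative only and not nova.compute.manager.InstanceEvents.

# Toy external-event registry in the spirit of the log records above.
import threading

class InstanceEvents:
    def __init__(self):
        self._lock = threading.Lock()
        self._events = {}          # (instance_uuid, event_name) -> threading.Event

    def prepare(self, instance_uuid, event_name):
        # Called before the operation that will eventually trigger the event.
        ev = threading.Event()
        with self._lock:
            self._events[(instance_uuid, event_name)] = ev
        return ev

    def pop_and_dispatch(self, instance_uuid, event_name):
        # Called when the external service reports the event.
        with self._lock:
            ev = self._events.pop((instance_uuid, event_name), None)
        if ev is None:
            print(f"Received unexpected event {event_name} for {instance_uuid}")
        else:
            ev.set()

# Usage: the spawning thread calls prepare(...) and then ev.wait(timeout=...);
# the event-handler thread calls pop_and_dispatch(...) once the port is plugged.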
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 1036.754084] env[63345]: DEBUG oslo_concurrency.lockutils [req-83102e3a-e949-4099-83d0-a8c7beb1b25c req-114ffe8d-45dc-4706-8e85-e24694c5e173 service nova] Acquiring lock "refresh_cache-95ef4f91-a618-4ae2-95ad-d027c031f239" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1036.848028] env[63345]: WARNING nova.network.neutron [None req-952a4b25-c08e-4416-91bc-1e2edc8d77d4 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] b360ab0d-3deb-4632-a8d5-c1639db9e9e2 already exists in list: networks containing: ['b360ab0d-3deb-4632-a8d5-c1639db9e9e2']. ignoring it [ 1036.882600] env[63345]: DEBUG nova.network.neutron [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 95ef4f91-a618-4ae2-95ad-d027c031f239] Updating instance_info_cache with network_info: [{"id": "e65d10a2-1bac-4be4-846a-6fc94207c2b7", "address": "fa:16:3e:c5:a7:39", "network": {"id": "372a3368-2d7a-4380-b811-7ad477d85250", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-454648225-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "41afa63287424a549133615eb390bac7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b8af79a-31d5-4d78-93d7-3919aa1d9186", "external-id": "nsx-vlan-transportzone-324", "segmentation_id": 324, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape65d10a2-1b", "ovs_interfaceid": "e65d10a2-1bac-4be4-846a-6fc94207c2b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1036.884842] env[63345]: INFO nova.compute.manager [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: 0f3f59b6-e7bc-4657-af5f-eec18efc3666] Took 13.59 seconds to build instance. [ 1036.892020] env[63345]: DEBUG oslo_vmware.api [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017742, 'name': PowerOnVM_Task, 'duration_secs': 0.483499} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.892370] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1036.892820] env[63345]: INFO nova.compute.manager [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Took 10.11 seconds to spawn the instance on the hypervisor. [ 1036.893109] env[63345]: DEBUG nova.compute.manager [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1036.894204] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-632ada86-16c6-4c76-996d-a60e56d409d2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.945796] env[63345]: DEBUG oslo_vmware.api [None req-54db4300-37a3-4464-bcaf-1099a9ac4537 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Task: {'id': task-1017741, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.588145} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.946194] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-54db4300-37a3-4464-bcaf-1099a9ac4537 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] da3408a0-cce7-4252-be47-097f081d83c1/da3408a0-cce7-4252-be47-097f081d83c1.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1036.946486] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-54db4300-37a3-4464-bcaf-1099a9ac4537 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: da3408a0-cce7-4252-be47-097f081d83c1] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1036.947520] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f8fe7cf7-7b33-4684-a466-7f31c7040387 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.957337] env[63345]: DEBUG oslo_vmware.api [None req-54db4300-37a3-4464-bcaf-1099a9ac4537 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Waiting for the task: (returnval){ [ 1036.957337] env[63345]: value = "task-1017743" [ 1036.957337] env[63345]: _type = "Task" [ 1036.957337] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.965711] env[63345]: DEBUG oslo_vmware.api [None req-54db4300-37a3-4464-bcaf-1099a9ac4537 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Task: {'id': task-1017743, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.073250] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f6699b30-427e-475c-bf5f-171539ce6309 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.762s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1037.075503] env[63345]: DEBUG oslo_concurrency.lockutils [None req-73a5a717-9b1e-41c1-9c56-b077bccf504c tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.710s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1037.077182] env[63345]: INFO nova.compute.claims [None req-73a5a717-9b1e-41c1-9c56-b077bccf504c tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1037.100456] env[63345]: INFO nova.scheduler.client.report [None req-f6699b30-427e-475c-bf5f-171539ce6309 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Deleted allocations for instance a7d80763-92f0-45a9-b24b-1f973bffb376 [ 1037.273015] env[63345]: DEBUG nova.network.neutron [None req-952a4b25-c08e-4416-91bc-1e2edc8d77d4 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Updating instance_info_cache with network_info: [{"id": "d6e5e759-86e1-4f76-9b65-19b2691780df", "address": "fa:16:3e:21:49:6e", "network": {"id": "b360ab0d-3deb-4632-a8d5-c1639db9e9e2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2015660260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.225", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33c28bfca4da460e8ca96dc7519204c8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f35e69ef-c2c8-4b8c-9887-33e97b242c0a", "external-id": "nsx-vlan-transportzone-969", "segmentation_id": 969, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd6e5e759-86", "ovs_interfaceid": "d6e5e759-86e1-4f76-9b65-19b2691780df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "f9c53757-0ec3-4d99-9493-d12a48f28db3", "address": "fa:16:3e:2f:a3:4f", 
"network": {"id": "b360ab0d-3deb-4632-a8d5-c1639db9e9e2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2015660260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33c28bfca4da460e8ca96dc7519204c8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f35e69ef-c2c8-4b8c-9887-33e97b242c0a", "external-id": "nsx-vlan-transportzone-969", "segmentation_id": 969, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf9c53757-0e", "ovs_interfaceid": "f9c53757-0ec3-4d99-9493-d12a48f28db3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1037.385284] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Releasing lock "refresh_cache-95ef4f91-a618-4ae2-95ad-d027c031f239" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1037.385646] env[63345]: DEBUG nova.compute.manager [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 95ef4f91-a618-4ae2-95ad-d027c031f239] Instance network_info: |[{"id": "e65d10a2-1bac-4be4-846a-6fc94207c2b7", "address": "fa:16:3e:c5:a7:39", "network": {"id": "372a3368-2d7a-4380-b811-7ad477d85250", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-454648225-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "41afa63287424a549133615eb390bac7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b8af79a-31d5-4d78-93d7-3919aa1d9186", "external-id": "nsx-vlan-transportzone-324", "segmentation_id": 324, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape65d10a2-1b", "ovs_interfaceid": "e65d10a2-1bac-4be4-846a-6fc94207c2b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 1037.385964] env[63345]: DEBUG oslo_concurrency.lockutils [req-83102e3a-e949-4099-83d0-a8c7beb1b25c req-114ffe8d-45dc-4706-8e85-e24694c5e173 service nova] Acquired lock "refresh_cache-95ef4f91-a618-4ae2-95ad-d027c031f239" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1037.386181] env[63345]: DEBUG nova.network.neutron [req-83102e3a-e949-4099-83d0-a8c7beb1b25c 
req-114ffe8d-45dc-4706-8e85-e24694c5e173 service nova] [instance: 95ef4f91-a618-4ae2-95ad-d027c031f239] Refreshing network info cache for port e65d10a2-1bac-4be4-846a-6fc94207c2b7 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1037.387436] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 95ef4f91-a618-4ae2-95ad-d027c031f239] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c5:a7:39', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5b8af79a-31d5-4d78-93d7-3919aa1d9186', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e65d10a2-1bac-4be4-846a-6fc94207c2b7', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1037.395219] env[63345]: DEBUG oslo.service.loopingcall [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1037.399079] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8d8d0428-c2bb-4ca4-98c8-51de0aff75e2 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Lock "0f3f59b6-e7bc-4657-af5f-eec18efc3666" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.118s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1037.399316] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 95ef4f91-a618-4ae2-95ad-d027c031f239] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1037.399984] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2b5075c2-eadd-460a-a6ab-22d6567b7ab4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.424332] env[63345]: INFO nova.compute.manager [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Took 14.94 seconds to build instance. [ 1037.427816] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1037.427816] env[63345]: value = "task-1017744" [ 1037.427816] env[63345]: _type = "Task" [ 1037.427816] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.438044] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017744, 'name': CreateVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.470156] env[63345]: DEBUG oslo_vmware.api [None req-54db4300-37a3-4464-bcaf-1099a9ac4537 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Task: {'id': task-1017743, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.128716} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.470390] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-54db4300-37a3-4464-bcaf-1099a9ac4537 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: da3408a0-cce7-4252-be47-097f081d83c1] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1037.471216] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07ebfbff-bd56-4be4-b272-4aecfd0ecb87 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.491871] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-54db4300-37a3-4464-bcaf-1099a9ac4537 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: da3408a0-cce7-4252-be47-097f081d83c1] Reconfiguring VM instance instance-00000069 to attach disk [datastore2] da3408a0-cce7-4252-be47-097f081d83c1/da3408a0-cce7-4252-be47-097f081d83c1.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1037.492622] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-92fbd72c-5990-4621-8d10-970e55545cc4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.515456] env[63345]: DEBUG oslo_vmware.api [None req-54db4300-37a3-4464-bcaf-1099a9ac4537 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Waiting for the task: (returnval){ [ 1037.515456] env[63345]: value = "task-1017745" [ 1037.515456] env[63345]: _type = "Task" [ 1037.515456] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.524582] env[63345]: DEBUG oslo_vmware.api [None req-54db4300-37a3-4464-bcaf-1099a9ac4537 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Task: {'id': task-1017745, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.610293] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f6699b30-427e-475c-bf5f-171539ce6309 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lock "a7d80763-92f0-45a9-b24b-1f973bffb376" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.300s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1037.776617] env[63345]: DEBUG oslo_concurrency.lockutils [None req-952a4b25-c08e-4416-91bc-1e2edc8d77d4 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Releasing lock "refresh_cache-7057cdfc-a6d9-4e52-b650-6a5709d5f8c2" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1037.777510] env[63345]: DEBUG oslo_concurrency.lockutils [None req-952a4b25-c08e-4416-91bc-1e2edc8d77d4 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquiring lock "7057cdfc-a6d9-4e52-b650-6a5709d5f8c2" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1037.777741] env[63345]: DEBUG oslo_concurrency.lockutils [None req-952a4b25-c08e-4416-91bc-1e2edc8d77d4 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquired lock "7057cdfc-a6d9-4e52-b650-6a5709d5f8c2" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1037.778743] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b42a0ea4-0f8b-40d0-a871-843af87e93d3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.800023] env[63345]: DEBUG nova.virt.hardware [None req-952a4b25-c08e-4416-91bc-1e2edc8d77d4 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1037.800362] env[63345]: DEBUG nova.virt.hardware [None req-952a4b25-c08e-4416-91bc-1e2edc8d77d4 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1037.800524] env[63345]: DEBUG nova.virt.hardware [None req-952a4b25-c08e-4416-91bc-1e2edc8d77d4 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1037.800716] env[63345]: DEBUG nova.virt.hardware [None req-952a4b25-c08e-4416-91bc-1e2edc8d77d4 
tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1037.800871] env[63345]: DEBUG nova.virt.hardware [None req-952a4b25-c08e-4416-91bc-1e2edc8d77d4 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1037.801036] env[63345]: DEBUG nova.virt.hardware [None req-952a4b25-c08e-4416-91bc-1e2edc8d77d4 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1037.801258] env[63345]: DEBUG nova.virt.hardware [None req-952a4b25-c08e-4416-91bc-1e2edc8d77d4 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1037.801423] env[63345]: DEBUG nova.virt.hardware [None req-952a4b25-c08e-4416-91bc-1e2edc8d77d4 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1037.801824] env[63345]: DEBUG nova.virt.hardware [None req-952a4b25-c08e-4416-91bc-1e2edc8d77d4 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1037.802093] env[63345]: DEBUG nova.virt.hardware [None req-952a4b25-c08e-4416-91bc-1e2edc8d77d4 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1037.802218] env[63345]: DEBUG nova.virt.hardware [None req-952a4b25-c08e-4416-91bc-1e2edc8d77d4 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1037.809040] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-952a4b25-c08e-4416-91bc-1e2edc8d77d4 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Reconfiguring VM to attach interface {{(pid=63345) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1929}} [ 1037.809418] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c57ad93e-bc09-470d-ab2d-8f9fcad83c97 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.829641] env[63345]: DEBUG oslo_vmware.api [None req-952a4b25-c08e-4416-91bc-1e2edc8d77d4 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Waiting for the task: 
(returnval){ [ 1037.829641] env[63345]: value = "task-1017746" [ 1037.829641] env[63345]: _type = "Task" [ 1037.829641] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.838749] env[63345]: DEBUG oslo_vmware.api [None req-952a4b25-c08e-4416-91bc-1e2edc8d77d4 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017746, 'name': ReconfigVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.926735] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b9424d56-917f-4ed6-b1a8-d5be5d1bbae2 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Lock "95738bee-d291-4f27-aeff-9445939bb3fa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.450s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1037.937926] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017744, 'name': CreateVM_Task, 'duration_secs': 0.506618} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.938196] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 95ef4f91-a618-4ae2-95ad-d027c031f239] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1037.938945] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1037.939170] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1037.939581] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1037.939903] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1d966924-291b-4fce-bdbb-a150ea269410 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.943053] env[63345]: DEBUG nova.network.neutron [req-83102e3a-e949-4099-83d0-a8c7beb1b25c req-114ffe8d-45dc-4706-8e85-e24694c5e173 service nova] [instance: 95ef4f91-a618-4ae2-95ad-d027c031f239] Updated VIF entry in instance network info cache for port e65d10a2-1bac-4be4-846a-6fc94207c2b7. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1037.943101] env[63345]: DEBUG nova.network.neutron [req-83102e3a-e949-4099-83d0-a8c7beb1b25c req-114ffe8d-45dc-4706-8e85-e24694c5e173 service nova] [instance: 95ef4f91-a618-4ae2-95ad-d027c031f239] Updating instance_info_cache with network_info: [{"id": "e65d10a2-1bac-4be4-846a-6fc94207c2b7", "address": "fa:16:3e:c5:a7:39", "network": {"id": "372a3368-2d7a-4380-b811-7ad477d85250", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-454648225-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "41afa63287424a549133615eb390bac7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b8af79a-31d5-4d78-93d7-3919aa1d9186", "external-id": "nsx-vlan-transportzone-324", "segmentation_id": 324, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape65d10a2-1b", "ovs_interfaceid": "e65d10a2-1bac-4be4-846a-6fc94207c2b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1037.948912] env[63345]: DEBUG oslo_vmware.api [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Waiting for the task: (returnval){ [ 1037.948912] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]527bb018-de41-21d9-1efe-fc7046b11e67" [ 1037.948912] env[63345]: _type = "Task" [ 1037.948912] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.958516] env[63345]: DEBUG oslo_vmware.api [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]527bb018-de41-21d9-1efe-fc7046b11e67, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.026597] env[63345]: DEBUG oslo_vmware.api [None req-54db4300-37a3-4464-bcaf-1099a9ac4537 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Task: {'id': task-1017745, 'name': ReconfigVM_Task, 'duration_secs': 0.433951} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.026899] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-54db4300-37a3-4464-bcaf-1099a9ac4537 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: da3408a0-cce7-4252-be47-097f081d83c1] Reconfigured VM instance instance-00000069 to attach disk [datastore2] da3408a0-cce7-4252-be47-097f081d83c1/da3408a0-cce7-4252-be47-097f081d83c1.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1038.027555] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-98f850dc-296a-4932-9fb0-735fb8481706 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.040371] env[63345]: DEBUG oslo_vmware.api [None req-54db4300-37a3-4464-bcaf-1099a9ac4537 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Waiting for the task: (returnval){ [ 1038.040371] env[63345]: value = "task-1017747" [ 1038.040371] env[63345]: _type = "Task" [ 1038.040371] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.042517] env[63345]: DEBUG oslo_vmware.api [None req-54db4300-37a3-4464-bcaf-1099a9ac4537 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Task: {'id': task-1017747, 'name': Rename_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.240043] env[63345]: DEBUG nova.compute.manager [req-d9302fa1-4254-4ae9-9227-4f86f0cc78c3 req-345e169f-4569-488b-9793-f82abda92e5f service nova] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Received event network-changed-f9c53757-0ec3-4d99-9493-d12a48f28db3 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1038.240204] env[63345]: DEBUG nova.compute.manager [req-d9302fa1-4254-4ae9-9227-4f86f0cc78c3 req-345e169f-4569-488b-9793-f82abda92e5f service nova] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Refreshing instance network info cache due to event network-changed-f9c53757-0ec3-4d99-9493-d12a48f28db3. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 1038.240636] env[63345]: DEBUG oslo_concurrency.lockutils [req-d9302fa1-4254-4ae9-9227-4f86f0cc78c3 req-345e169f-4569-488b-9793-f82abda92e5f service nova] Acquiring lock "refresh_cache-7057cdfc-a6d9-4e52-b650-6a5709d5f8c2" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1038.240989] env[63345]: DEBUG oslo_concurrency.lockutils [req-d9302fa1-4254-4ae9-9227-4f86f0cc78c3 req-345e169f-4569-488b-9793-f82abda92e5f service nova] Acquired lock "refresh_cache-7057cdfc-a6d9-4e52-b650-6a5709d5f8c2" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1038.241187] env[63345]: DEBUG nova.network.neutron [req-d9302fa1-4254-4ae9-9227-4f86f0cc78c3 req-345e169f-4569-488b-9793-f82abda92e5f service nova] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Refreshing network info cache for port f9c53757-0ec3-4d99-9493-d12a48f28db3 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1038.307235] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5b74b79-f0ba-4a10-9b8a-2a3218837595 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.324392] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14d35d41-0bf1-4de9-abd4-63b128a5194b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.342226] env[63345]: DEBUG oslo_vmware.api [None req-952a4b25-c08e-4416-91bc-1e2edc8d77d4 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017746, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.370375] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38cac39e-f834-4c9a-8695-55e44581d33a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.378613] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c361d506-7749-4930-89e6-336a4faddae2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.392676] env[63345]: DEBUG nova.compute.provider_tree [None req-73a5a717-9b1e-41c1-9c56-b077bccf504c tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1038.446994] env[63345]: DEBUG oslo_concurrency.lockutils [req-83102e3a-e949-4099-83d0-a8c7beb1b25c req-114ffe8d-45dc-4706-8e85-e24694c5e173 service nova] Releasing lock "refresh_cache-95ef4f91-a618-4ae2-95ad-d027c031f239" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1038.460743] env[63345]: DEBUG oslo_vmware.api [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]527bb018-de41-21d9-1efe-fc7046b11e67, 'name': SearchDatastore_Task, 'duration_secs': 0.014942} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.461216] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1038.461365] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 95ef4f91-a618-4ae2-95ad-d027c031f239] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1038.461569] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1038.461727] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1038.461998] 
env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1038.462213] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-efbf998a-aabf-4045-b478-cd8ae0f1f932 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.471198] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1038.471406] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1038.472189] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1b1e138a-1477-41a4-bb17-d5bc43e97872 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.477621] env[63345]: DEBUG oslo_vmware.api [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Waiting for the task: (returnval){ [ 1038.477621] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52cf25f8-cb2b-e695-cee4-b0471f83f8ca" [ 1038.477621] env[63345]: _type = "Task" [ 1038.477621] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.486012] env[63345]: DEBUG oslo_vmware.api [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52cf25f8-cb2b-e695-cee4-b0471f83f8ca, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.543931] env[63345]: DEBUG oslo_vmware.api [None req-54db4300-37a3-4464-bcaf-1099a9ac4537 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Task: {'id': task-1017747, 'name': Rename_Task, 'duration_secs': 0.179594} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.544268] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-54db4300-37a3-4464-bcaf-1099a9ac4537 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: da3408a0-cce7-4252-be47-097f081d83c1] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1038.544574] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6294deb3-9c6b-40dc-a973-43e1d8d1af2d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.551763] env[63345]: DEBUG oslo_vmware.api [None req-54db4300-37a3-4464-bcaf-1099a9ac4537 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Waiting for the task: (returnval){ [ 1038.551763] env[63345]: value = "task-1017748" [ 1038.551763] env[63345]: _type = "Task" [ 1038.551763] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.560286] env[63345]: DEBUG oslo_vmware.api [None req-54db4300-37a3-4464-bcaf-1099a9ac4537 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Task: {'id': task-1017748, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.628352] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquiring lock "b3e0831b-b8f1-40c4-be01-71ed6484dbc0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1038.628352] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lock "b3e0831b-b8f1-40c4-be01-71ed6484dbc0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1038.648628] env[63345]: DEBUG oslo_vmware.rw_handles [None req-73457db2-b2f1-4d9d-b2c1-6ca515137362 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528b77b1-acaa-f4f5-2294-ebcef12bf34c/disk-0.vmdk. {{(pid=63345) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1038.649785] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da82b5af-1258-4359-b5df-fa7d21b4a409 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.657635] env[63345]: DEBUG oslo_vmware.rw_handles [None req-73457db2-b2f1-4d9d-b2c1-6ca515137362 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528b77b1-acaa-f4f5-2294-ebcef12bf34c/disk-0.vmdk is in state: ready. 
{{(pid=63345) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1038.657839] env[63345]: ERROR oslo_vmware.rw_handles [None req-73457db2-b2f1-4d9d-b2c1-6ca515137362 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528b77b1-acaa-f4f5-2294-ebcef12bf34c/disk-0.vmdk due to incomplete transfer. [ 1038.658106] env[63345]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-b58200ee-5078-450f-8de9-f82ca4bd1d61 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.666349] env[63345]: DEBUG oslo_vmware.rw_handles [None req-73457db2-b2f1-4d9d-b2c1-6ca515137362 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528b77b1-acaa-f4f5-2294-ebcef12bf34c/disk-0.vmdk. {{(pid=63345) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1038.666443] env[63345]: DEBUG nova.virt.vmwareapi.images [None req-73457db2-b2f1-4d9d-b2c1-6ca515137362 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Uploaded image e440825b-4356-4ee4-8b0a-5a0a6c082c83 to the Glance image server {{(pid=63345) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:473}} [ 1038.669075] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-73457db2-b2f1-4d9d-b2c1-6ca515137362 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Destroying the VM {{(pid=63345) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 1038.669518] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-33c8cae1-8297-444d-8c4e-31dd9ba38a8b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.676393] env[63345]: DEBUG oslo_vmware.api [None req-73457db2-b2f1-4d9d-b2c1-6ca515137362 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 1038.676393] env[63345]: value = "task-1017749" [ 1038.676393] env[63345]: _type = "Task" [ 1038.676393] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.686106] env[63345]: DEBUG oslo_vmware.api [None req-73457db2-b2f1-4d9d-b2c1-6ca515137362 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017749, 'name': Destroy_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.787137] env[63345]: DEBUG nova.compute.manager [req-145be6ba-95a8-4ecc-8c2e-b96b28a07fc2 req-475b5b68-295e-43f4-8122-69cecf9729f0 service nova] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Received event network-changed-f9b10cca-c2c3-45d2-a329-61efee5dde7f {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1038.787259] env[63345]: DEBUG nova.compute.manager [req-145be6ba-95a8-4ecc-8c2e-b96b28a07fc2 req-475b5b68-295e-43f4-8122-69cecf9729f0 service nova] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Refreshing instance network info cache due to event network-changed-f9b10cca-c2c3-45d2-a329-61efee5dde7f. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 1038.787419] env[63345]: DEBUG oslo_concurrency.lockutils [req-145be6ba-95a8-4ecc-8c2e-b96b28a07fc2 req-475b5b68-295e-43f4-8122-69cecf9729f0 service nova] Acquiring lock "refresh_cache-95738bee-d291-4f27-aeff-9445939bb3fa" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1038.787583] env[63345]: DEBUG oslo_concurrency.lockutils [req-145be6ba-95a8-4ecc-8c2e-b96b28a07fc2 req-475b5b68-295e-43f4-8122-69cecf9729f0 service nova] Acquired lock "refresh_cache-95738bee-d291-4f27-aeff-9445939bb3fa" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1038.787756] env[63345]: DEBUG nova.network.neutron [req-145be6ba-95a8-4ecc-8c2e-b96b28a07fc2 req-475b5b68-295e-43f4-8122-69cecf9729f0 service nova] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Refreshing network info cache for port f9b10cca-c2c3-45d2-a329-61efee5dde7f {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1038.841972] env[63345]: DEBUG oslo_vmware.api [None req-952a4b25-c08e-4416-91bc-1e2edc8d77d4 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017746, 'name': ReconfigVM_Task, 'duration_secs': 0.931196} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.842675] env[63345]: DEBUG oslo_concurrency.lockutils [None req-952a4b25-c08e-4416-91bc-1e2edc8d77d4 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Releasing lock "7057cdfc-a6d9-4e52-b650-6a5709d5f8c2" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1038.842943] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-952a4b25-c08e-4416-91bc-1e2edc8d77d4 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Reconfigured VM to attach interface {{(pid=63345) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1943}} [ 1038.896248] env[63345]: DEBUG nova.scheduler.client.report [None req-73a5a717-9b1e-41c1-9c56-b077bccf504c tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1038.989623] env[63345]: DEBUG oslo_vmware.api [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52cf25f8-cb2b-e695-cee4-b0471f83f8ca, 'name': SearchDatastore_Task, 'duration_secs': 0.011964} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.990500] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-045c71f3-0a44-4516-a54e-9a5afd8eeb55 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.993895] env[63345]: DEBUG nova.network.neutron [req-d9302fa1-4254-4ae9-9227-4f86f0cc78c3 req-345e169f-4569-488b-9793-f82abda92e5f service nova] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Updated VIF entry in instance network info cache for port f9c53757-0ec3-4d99-9493-d12a48f28db3. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1038.994627] env[63345]: DEBUG nova.network.neutron [req-d9302fa1-4254-4ae9-9227-4f86f0cc78c3 req-345e169f-4569-488b-9793-f82abda92e5f service nova] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Updating instance_info_cache with network_info: [{"id": "d6e5e759-86e1-4f76-9b65-19b2691780df", "address": "fa:16:3e:21:49:6e", "network": {"id": "b360ab0d-3deb-4632-a8d5-c1639db9e9e2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2015660260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.225", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33c28bfca4da460e8ca96dc7519204c8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f35e69ef-c2c8-4b8c-9887-33e97b242c0a", "external-id": "nsx-vlan-transportzone-969", "segmentation_id": 969, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd6e5e759-86", "ovs_interfaceid": "d6e5e759-86e1-4f76-9b65-19b2691780df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "f9c53757-0ec3-4d99-9493-d12a48f28db3", "address": "fa:16:3e:2f:a3:4f", "network": {"id": "b360ab0d-3deb-4632-a8d5-c1639db9e9e2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2015660260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33c28bfca4da460e8ca96dc7519204c8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f35e69ef-c2c8-4b8c-9887-33e97b242c0a", "external-id": "nsx-vlan-transportzone-969", "segmentation_id": 969, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf9c53757-0e", "ovs_interfaceid": "f9c53757-0ec3-4d99-9493-d12a48f28db3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1038.997205] env[63345]: DEBUG oslo_vmware.api [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Waiting for the task: (returnval){ [ 1038.997205] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52c39b14-98fd-290c-bbdd-9de765e4c6f9" [ 1038.997205] env[63345]: _type = "Task" [ 1038.997205] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.006936] env[63345]: DEBUG oslo_vmware.api [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52c39b14-98fd-290c-bbdd-9de765e4c6f9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.061966] env[63345]: DEBUG oslo_vmware.api [None req-54db4300-37a3-4464-bcaf-1099a9ac4537 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Task: {'id': task-1017748, 'name': PowerOnVM_Task} progress is 81%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.130674] env[63345]: DEBUG nova.compute.manager [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: b3e0831b-b8f1-40c4-be01-71ed6484dbc0] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 1039.187120] env[63345]: DEBUG oslo_vmware.api [None req-73457db2-b2f1-4d9d-b2c1-6ca515137362 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017749, 'name': Destroy_Task} progress is 33%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.352661] env[63345]: DEBUG oslo_concurrency.lockutils [None req-952a4b25-c08e-4416-91bc-1e2edc8d77d4 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Lock "interface-7057cdfc-a6d9-4e52-b650-6a5709d5f8c2-f9c53757-0ec3-4d99-9493-d12a48f28db3" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.288s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1039.403437] env[63345]: DEBUG oslo_concurrency.lockutils [None req-73a5a717-9b1e-41c1-9c56-b077bccf504c tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.328s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1039.403897] env[63345]: DEBUG nova.compute.manager [None req-73a5a717-9b1e-41c1-9c56-b077bccf504c tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Start building networks asynchronously for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 1039.498373] env[63345]: DEBUG oslo_concurrency.lockutils [req-d9302fa1-4254-4ae9-9227-4f86f0cc78c3 req-345e169f-4569-488b-9793-f82abda92e5f service nova] Releasing lock "refresh_cache-7057cdfc-a6d9-4e52-b650-6a5709d5f8c2" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1039.509569] env[63345]: DEBUG oslo_vmware.api [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52c39b14-98fd-290c-bbdd-9de765e4c6f9, 'name': SearchDatastore_Task, 'duration_secs': 0.031227} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.509917] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1039.510225] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 95ef4f91-a618-4ae2-95ad-d027c031f239/95ef4f91-a618-4ae2-95ad-d027c031f239.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1039.510549] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fdb65159-528b-481c-9422-49a0b8ff3f78 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.518151] env[63345]: DEBUG oslo_vmware.api [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Waiting for the task: (returnval){ [ 1039.518151] env[63345]: value = "task-1017750" [ 1039.518151] env[63345]: _type = "Task" [ 1039.518151] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.528449] env[63345]: DEBUG oslo_vmware.api [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017750, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.563118] env[63345]: DEBUG oslo_vmware.api [None req-54db4300-37a3-4464-bcaf-1099a9ac4537 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Task: {'id': task-1017748, 'name': PowerOnVM_Task, 'duration_secs': 0.837418} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.563479] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-54db4300-37a3-4464-bcaf-1099a9ac4537 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: da3408a0-cce7-4252-be47-097f081d83c1] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1039.563751] env[63345]: INFO nova.compute.manager [None req-54db4300-37a3-4464-bcaf-1099a9ac4537 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: da3408a0-cce7-4252-be47-097f081d83c1] Took 7.30 seconds to spawn the instance on the hypervisor. [ 1039.563955] env[63345]: DEBUG nova.compute.manager [None req-54db4300-37a3-4464-bcaf-1099a9ac4537 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: da3408a0-cce7-4252-be47-097f081d83c1] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1039.564871] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2c2630d-eb1b-4ed4-89d8-bfbb62293f98 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.630126] env[63345]: DEBUG nova.network.neutron [req-145be6ba-95a8-4ecc-8c2e-b96b28a07fc2 req-475b5b68-295e-43f4-8122-69cecf9729f0 service nova] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Updated VIF entry in instance network info cache for port f9b10cca-c2c3-45d2-a329-61efee5dde7f. {{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1039.630627] env[63345]: DEBUG nova.network.neutron [req-145be6ba-95a8-4ecc-8c2e-b96b28a07fc2 req-475b5b68-295e-43f4-8122-69cecf9729f0 service nova] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Updating instance_info_cache with network_info: [{"id": "f9b10cca-c2c3-45d2-a329-61efee5dde7f", "address": "fa:16:3e:31:f2:fb", "network": {"id": "95d95c9b-b21c-4ee5-ab54-d0bf2699d38e", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-88421441-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba08f64c26d245a8b8f2b52ea97c2f1a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7043ca7a-807c-4c7b-b646-23ffece188b2", "external-id": "nsx-vlan-transportzone-619", "segmentation_id": 619, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf9b10cca-c2", "ovs_interfaceid": "f9b10cca-c2c3-45d2-a329-61efee5dde7f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1039.653542] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] 
Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1039.653907] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1039.655478] env[63345]: INFO nova.compute.claims [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: b3e0831b-b8f1-40c4-be01-71ed6484dbc0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1039.691330] env[63345]: DEBUG oslo_vmware.api [None req-73457db2-b2f1-4d9d-b2c1-6ca515137362 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017749, 'name': Destroy_Task, 'duration_secs': 0.632045} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.691735] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-73457db2-b2f1-4d9d-b2c1-6ca515137362 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Destroyed the VM [ 1039.692115] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-73457db2-b2f1-4d9d-b2c1-6ca515137362 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Deleting Snapshot of the VM instance {{(pid=63345) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1039.692474] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-a17cd2c3-3903-48db-988d-215880f1f148 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.700970] env[63345]: DEBUG oslo_vmware.api [None req-73457db2-b2f1-4d9d-b2c1-6ca515137362 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 1039.700970] env[63345]: value = "task-1017751" [ 1039.700970] env[63345]: _type = "Task" [ 1039.700970] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.714150] env[63345]: DEBUG oslo_vmware.api [None req-73457db2-b2f1-4d9d-b2c1-6ca515137362 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017751, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.833309] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Acquiring lock "5cefe8a6-4af0-47d4-84f5-1d579d0c9968" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1039.833447] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Lock "5cefe8a6-4af0-47d4-84f5-1d579d0c9968" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1039.913331] env[63345]: DEBUG nova.compute.utils [None req-73a5a717-9b1e-41c1-9c56-b077bccf504c tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1039.915072] env[63345]: DEBUG nova.compute.manager [None req-73a5a717-9b1e-41c1-9c56-b077bccf504c tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Allocating IP information in the background. {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1039.915316] env[63345]: DEBUG nova.network.neutron [None req-73a5a717-9b1e-41c1-9c56-b077bccf504c tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1039.960787] env[63345]: DEBUG nova.policy [None req-73a5a717-9b1e-41c1-9c56-b077bccf504c tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dd618fef89a843209784ca9e925d18eb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cb91ecf5d00e48dea9baf2122ac4fed7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 1040.028738] env[63345]: DEBUG oslo_vmware.api [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017750, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.087626] env[63345]: INFO nova.compute.manager [None req-54db4300-37a3-4464-bcaf-1099a9ac4537 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: da3408a0-cce7-4252-be47-097f081d83c1] Took 16.48 seconds to build instance. 
[ 1040.134124] env[63345]: DEBUG oslo_concurrency.lockutils [req-145be6ba-95a8-4ecc-8c2e-b96b28a07fc2 req-475b5b68-295e-43f4-8122-69cecf9729f0 service nova] Releasing lock "refresh_cache-95738bee-d291-4f27-aeff-9445939bb3fa" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1040.210949] env[63345]: DEBUG oslo_vmware.api [None req-73457db2-b2f1-4d9d-b2c1-6ca515137362 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017751, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.338524] env[63345]: DEBUG nova.compute.manager [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] [instance: 5cefe8a6-4af0-47d4-84f5-1d579d0c9968] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 1040.346020] env[63345]: DEBUG nova.network.neutron [None req-73a5a717-9b1e-41c1-9c56-b077bccf504c tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Successfully created port: 4829f314-cace-49cc-b77a-016ee4b1c7e6 {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1040.419681] env[63345]: DEBUG nova.compute.manager [None req-73a5a717-9b1e-41c1-9c56-b077bccf504c tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Start building block device mappings for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 1040.529398] env[63345]: DEBUG oslo_vmware.api [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017750, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.711637} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.529680] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 95ef4f91-a618-4ae2-95ad-d027c031f239/95ef4f91-a618-4ae2-95ad-d027c031f239.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1040.529907] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 95ef4f91-a618-4ae2-95ad-d027c031f239] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1040.530176] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-640ef30b-2cc6-4368-a143-d0febb66bcc5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.536952] env[63345]: DEBUG oslo_vmware.api [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Waiting for the task: (returnval){ [ 1040.536952] env[63345]: value = "task-1017752" [ 1040.536952] env[63345]: _type = "Task" [ 1040.536952] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.546081] env[63345]: DEBUG oslo_vmware.api [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017752, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.590499] env[63345]: DEBUG oslo_concurrency.lockutils [None req-54db4300-37a3-4464-bcaf-1099a9ac4537 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Lock "da3408a0-cce7-4252-be47-097f081d83c1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.987s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1040.717852] env[63345]: DEBUG oslo_vmware.api [None req-73457db2-b2f1-4d9d-b2c1-6ca515137362 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017751, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.820387] env[63345]: DEBUG oslo_concurrency.lockutils [None req-98f2f512-c809-4bf9-a76a-91523a39ee46 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquiring lock "interface-7057cdfc-a6d9-4e52-b650-6a5709d5f8c2-f9c53757-0ec3-4d99-9493-d12a48f28db3" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1040.820387] env[63345]: DEBUG oslo_concurrency.lockutils [None req-98f2f512-c809-4bf9-a76a-91523a39ee46 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Lock "interface-7057cdfc-a6d9-4e52-b650-6a5709d5f8c2-f9c53757-0ec3-4d99-9493-d12a48f28db3" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1040.865018] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3723b13b-1596-4ee2-909f-8e35e18ccfaa {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.872101] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c6849da-1e0e-4cbd-913b-68cdd0931ae6 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.876777] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1040.909552] env[63345]: INFO nova.compute.manager [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: da3408a0-cce7-4252-be47-097f081d83c1] Rebuilding instance [ 1040.912145] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dd70006-0890-4e86-81db-946623b9049d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.922940] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22a1ce14-69f7-458d-be20-c4ff5f6afd92 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.927855] env[63345]: INFO nova.virt.block_device [None req-73a5a717-9b1e-41c1-9c56-b077bccf504c tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Booting with volume 20d77566-3841-4d48-8c1e-d94d3b3b3333 at /dev/sda [ 1040.942261] env[63345]: DEBUG nova.compute.provider_tree [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1040.963265] env[63345]: DEBUG nova.compute.manager [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: da3408a0-cce7-4252-be47-097f081d83c1] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1040.964136] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f40d23a-60f4-40be-ac2d-75961ce4b518 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.980535] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f0e5e084-ae30-4749-95c0-fe42f72543c4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.989868] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2496744d-9fe2-4eae-addd-8e8fb6b546c6 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.020449] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d6763f0c-ba77-4156-8e97-f470e26fe5c0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.029902] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ce18ef6-d638-4dd2-b1df-690d677c6561 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.047431] env[63345]: DEBUG oslo_vmware.api [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017752, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.168141} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.048152] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 95ef4f91-a618-4ae2-95ad-d027c031f239] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1041.048445] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44d2a155-fd39-48b4-86df-3cece2427389 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.069412] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 95ef4f91-a618-4ae2-95ad-d027c031f239] Reconfiguring VM instance instance-0000006a to attach disk [datastore2] 95ef4f91-a618-4ae2-95ad-d027c031f239/95ef4f91-a618-4ae2-95ad-d027c031f239.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1041.080137] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-44131918-f641-43b8-b9b2-ae5a54d30466 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.094964] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d75ee30-7f50-41d3-bf7e-510756ee2056 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.103227] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddc99299-73db-4ec7-8146-1a3bce9d2ca2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.105471] env[63345]: DEBUG oslo_vmware.api [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Waiting for the task: (returnval){ [ 1041.105471] env[63345]: value = "task-1017753" [ 1041.105471] env[63345]: _type = "Task" [ 1041.105471] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.114342] env[63345]: DEBUG oslo_vmware.api [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017753, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.117583] env[63345]: DEBUG nova.virt.block_device [None req-73a5a717-9b1e-41c1-9c56-b077bccf504c tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Updating existing volume attachment record: e0bfa2bd-844e-4580-96fb-30414a149211 {{(pid=63345) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1041.212125] env[63345]: DEBUG oslo_vmware.api [None req-73457db2-b2f1-4d9d-b2c1-6ca515137362 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017751, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.322625] env[63345]: DEBUG oslo_concurrency.lockutils [None req-98f2f512-c809-4bf9-a76a-91523a39ee46 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquiring lock "7057cdfc-a6d9-4e52-b650-6a5709d5f8c2" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1041.322975] env[63345]: DEBUG oslo_concurrency.lockutils [None req-98f2f512-c809-4bf9-a76a-91523a39ee46 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquired lock "7057cdfc-a6d9-4e52-b650-6a5709d5f8c2" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1041.323865] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d45b7f2-dff8-49bf-be6f-c4e1f3fbc5fe {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.341879] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cecdc6b-df50-499d-96ff-85f7c9bd02fb {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.366503] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-98f2f512-c809-4bf9-a76a-91523a39ee46 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Reconfiguring VM to detach interface {{(pid=63345) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1974}} [ 1041.366834] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0b166187-7976-418f-944b-26c52541d879 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.386305] env[63345]: DEBUG oslo_vmware.api [None req-98f2f512-c809-4bf9-a76a-91523a39ee46 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Waiting for the task: (returnval){ [ 1041.386305] env[63345]: value = "task-1017754" [ 1041.386305] env[63345]: _type = "Task" [ 1041.386305] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.394332] env[63345]: DEBUG oslo_vmware.api [None req-98f2f512-c809-4bf9-a76a-91523a39ee46 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017754, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.451236] env[63345]: DEBUG nova.scheduler.client.report [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1041.617462] env[63345]: DEBUG oslo_vmware.api [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017753, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.713090] env[63345]: DEBUG oslo_vmware.api [None req-73457db2-b2f1-4d9d-b2c1-6ca515137362 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017751, 'name': RemoveSnapshot_Task, 'duration_secs': 1.560048} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.713392] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-73457db2-b2f1-4d9d-b2c1-6ca515137362 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Deleted Snapshot of the VM instance {{(pid=63345) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1041.713641] env[63345]: INFO nova.compute.manager [None req-73457db2-b2f1-4d9d-b2c1-6ca515137362 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Took 15.74 seconds to snapshot the instance on the hypervisor. [ 1041.896271] env[63345]: DEBUG oslo_vmware.api [None req-98f2f512-c809-4bf9-a76a-91523a39ee46 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017754, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.958021] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.302s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1041.958021] env[63345]: DEBUG nova.compute.manager [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: b3e0831b-b8f1-40c4-be01-71ed6484dbc0] Start building networks asynchronously for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 1041.959657] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.083s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1041.961270] env[63345]: INFO nova.compute.claims [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] [instance: 5cefe8a6-4af0-47d4-84f5-1d579d0c9968] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1041.977723] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: da3408a0-cce7-4252-be47-097f081d83c1] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1041.978261] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-76583760-dad7-4f31-b0b9-b2549483073a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.988028] env[63345]: DEBUG oslo_vmware.api [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Waiting for the task: (returnval){ [ 1041.988028] env[63345]: value = "task-1017755" [ 1041.988028] env[63345]: _type = "Task" [ 1041.988028] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.997355] env[63345]: DEBUG oslo_vmware.api [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Task: {'id': task-1017755, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.118256] env[63345]: DEBUG oslo_vmware.api [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017753, 'name': ReconfigVM_Task, 'duration_secs': 0.764174} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.118514] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 95ef4f91-a618-4ae2-95ad-d027c031f239] Reconfigured VM instance instance-0000006a to attach disk [datastore2] 95ef4f91-a618-4ae2-95ad-d027c031f239/95ef4f91-a618-4ae2-95ad-d027c031f239.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1042.119058] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9b8d0c7b-718a-4aa3-a722-9a3a502b5809 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.126327] env[63345]: DEBUG oslo_vmware.api [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Waiting for the task: (returnval){ [ 1042.126327] env[63345]: value = "task-1017756" [ 1042.126327] env[63345]: _type = "Task" [ 1042.126327] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.137577] env[63345]: DEBUG oslo_vmware.api [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017756, 'name': Rename_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.196416] env[63345]: DEBUG nova.compute.manager [req-d7e336ed-3b57-4d67-b55c-ffc6122c30c0 req-6f96c7ac-1653-40bc-988f-461617d2e048 service nova] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Received event network-vif-plugged-4829f314-cace-49cc-b77a-016ee4b1c7e6 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1042.196632] env[63345]: DEBUG oslo_concurrency.lockutils [req-d7e336ed-3b57-4d67-b55c-ffc6122c30c0 req-6f96c7ac-1653-40bc-988f-461617d2e048 service nova] Acquiring lock "bce78147-6f6d-47a2-84f3-482f59a8bb8e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1042.197027] env[63345]: DEBUG oslo_concurrency.lockutils [req-d7e336ed-3b57-4d67-b55c-ffc6122c30c0 req-6f96c7ac-1653-40bc-988f-461617d2e048 service nova] Lock "bce78147-6f6d-47a2-84f3-482f59a8bb8e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1042.197214] env[63345]: DEBUG oslo_concurrency.lockutils [req-d7e336ed-3b57-4d67-b55c-ffc6122c30c0 req-6f96c7ac-1653-40bc-988f-461617d2e048 service nova] Lock "bce78147-6f6d-47a2-84f3-482f59a8bb8e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1042.197405] env[63345]: DEBUG nova.compute.manager [req-d7e336ed-3b57-4d67-b55c-ffc6122c30c0 req-6f96c7ac-1653-40bc-988f-461617d2e048 service nova] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] No waiting events found dispatching 
network-vif-plugged-4829f314-cace-49cc-b77a-016ee4b1c7e6 {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1042.197574] env[63345]: WARNING nova.compute.manager [req-d7e336ed-3b57-4d67-b55c-ffc6122c30c0 req-6f96c7ac-1653-40bc-988f-461617d2e048 service nova] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Received unexpected event network-vif-plugged-4829f314-cace-49cc-b77a-016ee4b1c7e6 for instance with vm_state building and task_state block_device_mapping. [ 1042.262080] env[63345]: DEBUG nova.compute.manager [None req-73457db2-b2f1-4d9d-b2c1-6ca515137362 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Found 3 images (rotation: 2) {{(pid=63345) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4883}} [ 1042.262325] env[63345]: DEBUG nova.compute.manager [None req-73457db2-b2f1-4d9d-b2c1-6ca515137362 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Rotating out 1 backups {{(pid=63345) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4891}} [ 1042.262492] env[63345]: DEBUG nova.compute.manager [None req-73457db2-b2f1-4d9d-b2c1-6ca515137362 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Deleting image fdfac4b6-e678-4e72-af5e-15f172b83432 {{(pid=63345) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4896}} [ 1042.397777] env[63345]: DEBUG oslo_vmware.api [None req-98f2f512-c809-4bf9-a76a-91523a39ee46 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017754, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.466068] env[63345]: DEBUG nova.compute.utils [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1042.469105] env[63345]: DEBUG nova.compute.manager [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: b3e0831b-b8f1-40c4-be01-71ed6484dbc0] Allocating IP information in the background. {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1042.469295] env[63345]: DEBUG nova.network.neutron [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: b3e0831b-b8f1-40c4-be01-71ed6484dbc0] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1042.496049] env[63345]: DEBUG oslo_vmware.api [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Task: {'id': task-1017755, 'name': PowerOffVM_Task, 'duration_secs': 0.198591} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.496355] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: da3408a0-cce7-4252-be47-097f081d83c1] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1042.496661] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: da3408a0-cce7-4252-be47-097f081d83c1] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1042.497536] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef6852ef-0f21-4516-b875-96853c79fe72 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.504746] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: da3408a0-cce7-4252-be47-097f081d83c1] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1042.505017] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-93786757-955b-421e-ab6e-08a0d29b255b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.507616] env[63345]: DEBUG nova.policy [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fb6730bb6292421e8f943bce2e912bef', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c535ae9067ab4e8a87e95c68af4624fb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 1042.531825] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: da3408a0-cce7-4252-be47-097f081d83c1] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1042.531825] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: da3408a0-cce7-4252-be47-097f081d83c1] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1042.532061] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Deleting the datastore file [datastore2] da3408a0-cce7-4252-be47-097f081d83c1 {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1042.532191] env[63345]: DEBUG 
oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bd61d6de-46f2-41e7-b599-88738b3534bc {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.539038] env[63345]: DEBUG oslo_vmware.api [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Waiting for the task: (returnval){ [ 1042.539038] env[63345]: value = "task-1017758" [ 1042.539038] env[63345]: _type = "Task" [ 1042.539038] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.548897] env[63345]: DEBUG oslo_vmware.api [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Task: {'id': task-1017758, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.636195] env[63345]: DEBUG oslo_vmware.api [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017756, 'name': Rename_Task, 'duration_secs': 0.234054} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.636470] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 95ef4f91-a618-4ae2-95ad-d027c031f239] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1042.636740] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-898d499e-2d5f-488f-ae40-61f99662a2a7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.643208] env[63345]: DEBUG oslo_vmware.api [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Waiting for the task: (returnval){ [ 1042.643208] env[63345]: value = "task-1017759" [ 1042.643208] env[63345]: _type = "Task" [ 1042.643208] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.654462] env[63345]: DEBUG oslo_vmware.api [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017759, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.763282] env[63345]: DEBUG nova.network.neutron [None req-73a5a717-9b1e-41c1-9c56-b077bccf504c tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Successfully updated port: 4829f314-cace-49cc-b77a-016ee4b1c7e6 {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1042.785039] env[63345]: DEBUG nova.network.neutron [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: b3e0831b-b8f1-40c4-be01-71ed6484dbc0] Successfully created port: a4151246-fc54-4f99-9110-a65de119bea6 {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1042.829274] env[63345]: DEBUG nova.compute.manager [req-321f6b61-1472-446f-bf52-cd4089b3299c req-37ac067e-9779-481c-8826-77348919a77c service nova] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Received event network-changed-4829f314-cace-49cc-b77a-016ee4b1c7e6 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1042.829274] env[63345]: DEBUG nova.compute.manager [req-321f6b61-1472-446f-bf52-cd4089b3299c req-37ac067e-9779-481c-8826-77348919a77c service nova] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Refreshing instance network info cache due to event network-changed-4829f314-cace-49cc-b77a-016ee4b1c7e6. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 1042.829274] env[63345]: DEBUG oslo_concurrency.lockutils [req-321f6b61-1472-446f-bf52-cd4089b3299c req-37ac067e-9779-481c-8826-77348919a77c service nova] Acquiring lock "refresh_cache-bce78147-6f6d-47a2-84f3-482f59a8bb8e" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1042.829274] env[63345]: DEBUG oslo_concurrency.lockutils [req-321f6b61-1472-446f-bf52-cd4089b3299c req-37ac067e-9779-481c-8826-77348919a77c service nova] Acquired lock "refresh_cache-bce78147-6f6d-47a2-84f3-482f59a8bb8e" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1042.829927] env[63345]: DEBUG nova.network.neutron [req-321f6b61-1472-446f-bf52-cd4089b3299c req-37ac067e-9779-481c-8826-77348919a77c service nova] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Refreshing network info cache for port 4829f314-cace-49cc-b77a-016ee4b1c7e6 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1042.898378] env[63345]: DEBUG oslo_vmware.api [None req-98f2f512-c809-4bf9-a76a-91523a39ee46 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017754, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.969515] env[63345]: DEBUG nova.compute.manager [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: b3e0831b-b8f1-40c4-be01-71ed6484dbc0] Start building block device mappings for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 1043.048526] env[63345]: DEBUG oslo_vmware.api [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Task: {'id': task-1017758, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.253925} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.050995] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1043.051218] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: da3408a0-cce7-4252-be47-097f081d83c1] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1043.051409] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: da3408a0-cce7-4252-be47-097f081d83c1] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1043.153438] env[63345]: DEBUG oslo_vmware.api [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017759, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.179213] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-889bd3f0-f063-42f3-9190-30ee6932249a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.187178] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34bade69-0773-46de-ad0b-b7059c54f630 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.220847] env[63345]: DEBUG nova.compute.manager [None req-73a5a717-9b1e-41c1-9c56-b077bccf504c tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Start spawning the instance on the hypervisor. 
{{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 1043.221406] env[63345]: DEBUG nova.virt.hardware [None req-73a5a717-9b1e-41c1-9c56-b077bccf504c tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1043.221628] env[63345]: DEBUG nova.virt.hardware [None req-73a5a717-9b1e-41c1-9c56-b077bccf504c tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1043.221795] env[63345]: DEBUG nova.virt.hardware [None req-73a5a717-9b1e-41c1-9c56-b077bccf504c tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1043.221984] env[63345]: DEBUG nova.virt.hardware [None req-73a5a717-9b1e-41c1-9c56-b077bccf504c tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1043.222157] env[63345]: DEBUG nova.virt.hardware [None req-73a5a717-9b1e-41c1-9c56-b077bccf504c tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1043.222314] env[63345]: DEBUG nova.virt.hardware [None req-73a5a717-9b1e-41c1-9c56-b077bccf504c tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1043.222525] env[63345]: DEBUG nova.virt.hardware [None req-73a5a717-9b1e-41c1-9c56-b077bccf504c tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1043.222754] env[63345]: DEBUG nova.virt.hardware [None req-73a5a717-9b1e-41c1-9c56-b077bccf504c tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1043.222959] env[63345]: DEBUG nova.virt.hardware [None req-73a5a717-9b1e-41c1-9c56-b077bccf504c tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Got 1 possible topologies {{(pid=63345) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1043.223157] env[63345]: DEBUG nova.virt.hardware [None req-73a5a717-9b1e-41c1-9c56-b077bccf504c tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1043.223341] env[63345]: DEBUG nova.virt.hardware [None req-73a5a717-9b1e-41c1-9c56-b077bccf504c tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1043.224342] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee75a1ab-e686-45a6-891a-5cf177a16e94 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.227403] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdd30f58-85b2-44b9-ac02-a375ad200122 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.237603] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2042f164-3fbe-424c-a688-68ef0c215a44 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.246025] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c1a1644-ab9b-4642-9a42-6185b8a1c8da {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.258968] env[63345]: DEBUG nova.compute.provider_tree [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1043.268755] env[63345]: DEBUG oslo_concurrency.lockutils [None req-73a5a717-9b1e-41c1-9c56-b077bccf504c tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquiring lock "refresh_cache-bce78147-6f6d-47a2-84f3-482f59a8bb8e" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1043.394195] env[63345]: DEBUG nova.network.neutron [req-321f6b61-1472-446f-bf52-cd4089b3299c req-37ac067e-9779-481c-8826-77348919a77c service nova] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1043.404368] env[63345]: DEBUG oslo_vmware.api [None req-98f2f512-c809-4bf9-a76a-91523a39ee46 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017754, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.508256] env[63345]: DEBUG nova.network.neutron [req-321f6b61-1472-446f-bf52-cd4089b3299c req-37ac067e-9779-481c-8826-77348919a77c service nova] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1043.655063] env[63345]: DEBUG oslo_vmware.api [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017759, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.768326] env[63345]: DEBUG nova.scheduler.client.report [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1043.899387] env[63345]: DEBUG oslo_vmware.api [None req-98f2f512-c809-4bf9-a76a-91523a39ee46 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017754, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.982507] env[63345]: DEBUG nova.compute.manager [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: b3e0831b-b8f1-40c4-be01-71ed6484dbc0] Start spawning the instance on the hypervisor. 
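The inventory report above lists total, reserved and allocation_ratio per resource class for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57. Assuming the usual Placement convention that schedulable capacity is (total - reserved) * allocation_ratio (an assumption on my part, not something stated in this log), the logged numbers work out as follows:

    inventory = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        print(rc, capacity)  # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0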
{{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 1044.006231] env[63345]: DEBUG nova.virt.hardware [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1044.006501] env[63345]: DEBUG nova.virt.hardware [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1044.006671] env[63345]: DEBUG nova.virt.hardware [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1044.006868] env[63345]: DEBUG nova.virt.hardware [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1044.007035] env[63345]: DEBUG nova.virt.hardware [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1044.007208] env[63345]: DEBUG nova.virt.hardware [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1044.007419] env[63345]: DEBUG nova.virt.hardware [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1044.007587] env[63345]: DEBUG nova.virt.hardware [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1044.007768] env[63345]: DEBUG nova.virt.hardware [None 
req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1044.008014] env[63345]: DEBUG nova.virt.hardware [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1044.008134] env[63345]: DEBUG nova.virt.hardware [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1044.009030] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7835ad0c-97d1-4a25-9e96-6a985767f133 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.011852] env[63345]: DEBUG oslo_concurrency.lockutils [req-321f6b61-1472-446f-bf52-cd4089b3299c req-37ac067e-9779-481c-8826-77348919a77c service nova] Releasing lock "refresh_cache-bce78147-6f6d-47a2-84f3-482f59a8bb8e" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1044.012221] env[63345]: DEBUG oslo_concurrency.lockutils [None req-73a5a717-9b1e-41c1-9c56-b077bccf504c tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquired lock "refresh_cache-bce78147-6f6d-47a2-84f3-482f59a8bb8e" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1044.012428] env[63345]: DEBUG nova.network.neutron [None req-73a5a717-9b1e-41c1-9c56-b077bccf504c tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1044.019875] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55758906-0ebe-4b10-acc2-82c3fe3ff548 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.083070] env[63345]: DEBUG nova.virt.hardware [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), 
allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1044.083346] env[63345]: DEBUG nova.virt.hardware [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1044.083511] env[63345]: DEBUG nova.virt.hardware [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1044.083710] env[63345]: DEBUG nova.virt.hardware [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1044.083861] env[63345]: DEBUG nova.virt.hardware [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1044.084021] env[63345]: DEBUG nova.virt.hardware [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1044.084244] env[63345]: DEBUG nova.virt.hardware [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1044.084658] env[63345]: DEBUG nova.virt.hardware [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1044.084658] env[63345]: DEBUG nova.virt.hardware [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1044.084776] env[63345]: DEBUG nova.virt.hardware [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1044.084908] env[63345]: DEBUG nova.virt.hardware [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1044.085805] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99d2fa15-e2b7-4a82-8d33-26a30cff3e45 
{{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.095665] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e38f65d-099d-4254-8fff-5ad600ca6a25 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.109923] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: da3408a0-cce7-4252-be47-097f081d83c1] Instance VIF info [] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1044.116234] env[63345]: DEBUG oslo.service.loopingcall [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1044.116526] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: da3408a0-cce7-4252-be47-097f081d83c1] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1044.116758] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5b3104db-12d9-4c9b-a7db-81db090a6417 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.135015] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1044.135015] env[63345]: value = "task-1017760" [ 1044.135015] env[63345]: _type = "Task" [ 1044.135015] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.143144] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017760, 'name': CreateVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.154336] env[63345]: DEBUG oslo_vmware.api [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017759, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.273102] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.313s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1044.273707] env[63345]: DEBUG nova.compute.manager [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] [instance: 5cefe8a6-4af0-47d4-84f5-1d579d0c9968] Start building networks asynchronously for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 1044.400666] env[63345]: DEBUG oslo_vmware.api [None req-98f2f512-c809-4bf9-a76a-91523a39ee46 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017754, 'name': ReconfigVM_Task} progress is 14%. 
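The repeated "Build topologies for 1 vcpu(s) 1:1:1 ... Got 1 possible topologies" walks above enumerate (sockets, cores, threads) combinations whose product equals the flavor's vcpus, bounded by the 65536 limits. A rough, self-contained illustration of that enumeration (not Nova's hardware.py implementation):

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        topos = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            per_socket = vcpus // sockets
            for cores in range(1, min(per_socket, max_cores) + 1):
                if per_socket % cores:
                    continue
                threads = per_socket // cores
                if threads <= max_threads:
                    topos.append((sockets, cores, threads))
        return topos

    print(possible_topologies(1))  # [(1, 1, 1)]  ->  "Got 1 possible topologies"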
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.500477] env[63345]: DEBUG nova.network.neutron [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: b3e0831b-b8f1-40c4-be01-71ed6484dbc0] Successfully updated port: a4151246-fc54-4f99-9110-a65de119bea6 {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1044.544773] env[63345]: DEBUG nova.compute.manager [req-8aba45e5-60c5-4bbf-a02e-c58a07e6388c req-cc6c9b5d-d293-42e3-b0f5-95239a67e772 service nova] [instance: b3e0831b-b8f1-40c4-be01-71ed6484dbc0] Received event network-vif-plugged-a4151246-fc54-4f99-9110-a65de119bea6 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1044.545143] env[63345]: DEBUG oslo_concurrency.lockutils [req-8aba45e5-60c5-4bbf-a02e-c58a07e6388c req-cc6c9b5d-d293-42e3-b0f5-95239a67e772 service nova] Acquiring lock "b3e0831b-b8f1-40c4-be01-71ed6484dbc0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1044.545277] env[63345]: DEBUG oslo_concurrency.lockutils [req-8aba45e5-60c5-4bbf-a02e-c58a07e6388c req-cc6c9b5d-d293-42e3-b0f5-95239a67e772 service nova] Lock "b3e0831b-b8f1-40c4-be01-71ed6484dbc0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1044.545441] env[63345]: DEBUG oslo_concurrency.lockutils [req-8aba45e5-60c5-4bbf-a02e-c58a07e6388c req-cc6c9b5d-d293-42e3-b0f5-95239a67e772 service nova] Lock "b3e0831b-b8f1-40c4-be01-71ed6484dbc0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1044.545617] env[63345]: DEBUG nova.compute.manager [req-8aba45e5-60c5-4bbf-a02e-c58a07e6388c req-cc6c9b5d-d293-42e3-b0f5-95239a67e772 service nova] [instance: b3e0831b-b8f1-40c4-be01-71ed6484dbc0] No waiting events found dispatching network-vif-plugged-a4151246-fc54-4f99-9110-a65de119bea6 {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1044.545795] env[63345]: WARNING nova.compute.manager [req-8aba45e5-60c5-4bbf-a02e-c58a07e6388c req-cc6c9b5d-d293-42e3-b0f5-95239a67e772 service nova] [instance: b3e0831b-b8f1-40c4-be01-71ed6484dbc0] Received unexpected event network-vif-plugged-a4151246-fc54-4f99-9110-a65de119bea6 for instance with vm_state building and task_state spawning. [ 1044.546814] env[63345]: DEBUG nova.network.neutron [None req-73a5a717-9b1e-41c1-9c56-b077bccf504c tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1044.645388] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017760, 'name': CreateVM_Task, 'duration_secs': 0.328195} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.645569] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: da3408a0-cce7-4252-be47-097f081d83c1] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1044.646035] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1044.646174] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1044.646537] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1044.647120] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-84173f1e-cecd-40fe-b2b3-4ed415a84079 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.655489] env[63345]: DEBUG oslo_vmware.api [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Waiting for the task: (returnval){ [ 1044.655489] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]5263aace-db4e-5c3a-9cdd-d31a82bd4d1e" [ 1044.655489] env[63345]: _type = "Task" [ 1044.655489] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.661084] env[63345]: DEBUG oslo_vmware.api [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017759, 'name': PowerOnVM_Task, 'duration_secs': 1.587477} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.664128] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 95ef4f91-a618-4ae2-95ad-d027c031f239] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1044.664358] env[63345]: INFO nova.compute.manager [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 95ef4f91-a618-4ae2-95ad-d027c031f239] Took 10.08 seconds to spawn the instance on the hypervisor. 
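The recurring "Waiting for the task: (returnval){ value = task-... }" and "Task: {...} progress is N%" entries are oslo.vmware's task poller reporting on vCenter tasks (CreateVM_Task, PowerOnVM_Task, SearchDatastore_Task and so on). A self-contained sketch of that polling shape, with a stubbed task so it runs stand-alone (illustrative only, not oslo.vmware's wait_for_task/_poll_task code):

    import time

    def wait_for_task(poll, interval=0.5):
        # `poll` is an assumed callable returning (state, progress_pct),
        # where state is "running", "success" or "error".
        while True:
            state, progress = poll()
            print("progress is %d%%" % progress)  # mirrors the log lines above
            if state == "success":
                return
            if state == "error":
                raise RuntimeError("task failed")
            time.sleep(interval)

    _steps = iter([("running", 0), ("running", 66), ("success", 100)])
    wait_for_task(lambda: next(_steps), interval=0)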
[ 1044.664547] env[63345]: DEBUG nova.compute.manager [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 95ef4f91-a618-4ae2-95ad-d027c031f239] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1044.665615] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b313ce7-49cf-4c31-a024-9162844f980b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.679746] env[63345]: DEBUG oslo_vmware.api [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5263aace-db4e-5c3a-9cdd-d31a82bd4d1e, 'name': SearchDatastore_Task, 'duration_secs': 0.010122} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.679746] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1044.679746] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: da3408a0-cce7-4252-be47-097f081d83c1] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1044.679746] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1044.679746] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1044.679746] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1044.680249] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-58c15f78-ecee-4b53-a8a2-ee49cbec8c9f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.689908] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] 
Created directory with path [datastore1] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1044.690102] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1044.690835] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9dbda911-d723-4d3c-9d7a-894252c0d3b1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.696501] env[63345]: DEBUG oslo_vmware.api [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Waiting for the task: (returnval){ [ 1044.696501] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52faf6d1-bba9-046c-0743-39010c075081" [ 1044.696501] env[63345]: _type = "Task" [ 1044.696501] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.704132] env[63345]: DEBUG oslo_vmware.api [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52faf6d1-bba9-046c-0743-39010c075081, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.705106] env[63345]: DEBUG nova.network.neutron [None req-73a5a717-9b1e-41c1-9c56-b077bccf504c tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Updating instance_info_cache with network_info: [{"id": "4829f314-cace-49cc-b77a-016ee4b1c7e6", "address": "fa:16:3e:27:64:94", "network": {"id": "18b67684-3f06-4f15-be40-ba0b2769b248", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1680877425-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cb91ecf5d00e48dea9baf2122ac4fed7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "68add7d6-c025-46fa-84d3-9c589adb63e4", "external-id": "nsx-vlan-transportzone-961", "segmentation_id": 961, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4829f314-ca", "ovs_interfaceid": "4829f314-cace-49cc-b77a-016ee4b1c7e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1044.779444] env[63345]: DEBUG nova.compute.utils [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name 
/opt/stack/nova/nova/compute/utils.py:238}} [ 1044.780824] env[63345]: DEBUG nova.compute.manager [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] [instance: 5cefe8a6-4af0-47d4-84f5-1d579d0c9968] Not allocating networking since 'none' was specified. {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1983}} [ 1044.900602] env[63345]: DEBUG oslo_vmware.api [None req-98f2f512-c809-4bf9-a76a-91523a39ee46 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017754, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.003895] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquiring lock "refresh_cache-b3e0831b-b8f1-40c4-be01-71ed6484dbc0" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1045.004074] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquired lock "refresh_cache-b3e0831b-b8f1-40c4-be01-71ed6484dbc0" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1045.004236] env[63345]: DEBUG nova.network.neutron [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: b3e0831b-b8f1-40c4-be01-71ed6484dbc0] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1045.113725] env[63345]: DEBUG oslo_concurrency.lockutils [None req-964b90c2-b015-4b26-8960-5d7a28bdf614 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Acquiring lock "726332dd-8699-49a4-a9ea-b9cbfc159855" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1045.114037] env[63345]: DEBUG oslo_concurrency.lockutils [None req-964b90c2-b015-4b26-8960-5d7a28bdf614 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lock "726332dd-8699-49a4-a9ea-b9cbfc159855" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1045.114244] env[63345]: DEBUG nova.compute.manager [None req-964b90c2-b015-4b26-8960-5d7a28bdf614 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1045.115134] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2891076-5342-4379-8798-bf6886c0ef9f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.122616] env[63345]: DEBUG nova.compute.manager [None req-964b90c2-b015-4b26-8960-5d7a28bdf614 
tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63345) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3403}} [ 1045.123500] env[63345]: DEBUG nova.objects.instance [None req-964b90c2-b015-4b26-8960-5d7a28bdf614 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lazy-loading 'flavor' on Instance uuid 726332dd-8699-49a4-a9ea-b9cbfc159855 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1045.185331] env[63345]: INFO nova.compute.manager [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 95ef4f91-a618-4ae2-95ad-d027c031f239] Took 19.10 seconds to build instance. [ 1045.207735] env[63345]: DEBUG oslo_concurrency.lockutils [None req-73a5a717-9b1e-41c1-9c56-b077bccf504c tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Releasing lock "refresh_cache-bce78147-6f6d-47a2-84f3-482f59a8bb8e" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1045.208074] env[63345]: DEBUG nova.compute.manager [None req-73a5a717-9b1e-41c1-9c56-b077bccf504c tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Instance network_info: |[{"id": "4829f314-cace-49cc-b77a-016ee4b1c7e6", "address": "fa:16:3e:27:64:94", "network": {"id": "18b67684-3f06-4f15-be40-ba0b2769b248", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1680877425-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cb91ecf5d00e48dea9baf2122ac4fed7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "68add7d6-c025-46fa-84d3-9c589adb63e4", "external-id": "nsx-vlan-transportzone-961", "segmentation_id": 961, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4829f314-ca", "ovs_interfaceid": "4829f314-cace-49cc-b77a-016ee4b1c7e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 1045.208383] env[63345]: DEBUG oslo_vmware.api [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52faf6d1-bba9-046c-0743-39010c075081, 'name': SearchDatastore_Task, 'duration_secs': 0.019552} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.208722] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-73a5a717-9b1e-41c1-9c56-b077bccf504c tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:27:64:94', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '68add7d6-c025-46fa-84d3-9c589adb63e4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4829f314-cace-49cc-b77a-016ee4b1c7e6', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1045.216940] env[63345]: DEBUG oslo.service.loopingcall [None req-73a5a717-9b1e-41c1-9c56-b077bccf504c tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1045.217960] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1045.218184] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-608c92b5-3009-4de1-9778-7a4060298347 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.220415] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-67e50763-c297-4bdb-ad36-3b7ef6bc56b2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.239391] env[63345]: DEBUG oslo_vmware.api [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Waiting for the task: (returnval){ [ 1045.239391] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]523a84b4-c552-bd30-9ee1-c2c300e49f50" [ 1045.239391] env[63345]: _type = "Task" [ 1045.239391] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.240537] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1045.240537] env[63345]: value = "task-1017761" [ 1045.240537] env[63345]: _type = "Task" [ 1045.240537] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.251195] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017761, 'name': CreateVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.254435] env[63345]: DEBUG oslo_vmware.api [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]523a84b4-c552-bd30-9ee1-c2c300e49f50, 'name': SearchDatastore_Task, 'duration_secs': 0.00964} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.254674] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1045.254933] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore1] da3408a0-cce7-4252-be47-097f081d83c1/da3408a0-cce7-4252-be47-097f081d83c1.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1045.255184] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6fcff744-779a-414b-aa96-c48da81b34bb {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.261096] env[63345]: DEBUG oslo_vmware.api [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Waiting for the task: (returnval){ [ 1045.261096] env[63345]: value = "task-1017762" [ 1045.261096] env[63345]: _type = "Task" [ 1045.261096] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.268646] env[63345]: DEBUG oslo_vmware.api [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Task: {'id': task-1017762, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.282253] env[63345]: DEBUG nova.compute.manager [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] [instance: 5cefe8a6-4af0-47d4-84f5-1d579d0c9968] Start building block device mappings for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 1045.402670] env[63345]: DEBUG oslo_vmware.api [None req-98f2f512-c809-4bf9-a76a-91523a39ee46 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017754, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.535847] env[63345]: DEBUG nova.network.neutron [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: b3e0831b-b8f1-40c4-be01-71ed6484dbc0] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1045.678494] env[63345]: DEBUG nova.network.neutron [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: b3e0831b-b8f1-40c4-be01-71ed6484dbc0] Updating instance_info_cache with network_info: [{"id": "a4151246-fc54-4f99-9110-a65de119bea6", "address": "fa:16:3e:0f:cc:58", "network": {"id": "d7581fd9-99cb-4847-b9da-a659a40e1d52", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1100696493-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c535ae9067ab4e8a87e95c68af4624fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f77ff7a1-209c-4f3f-b2a0-fd817741e739", "external-id": "nsx-vlan-transportzone-935", "segmentation_id": 935, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4151246-fc", "ovs_interfaceid": "a4151246-fc54-4f99-9110-a65de119bea6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1045.687875] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9c48a35a-a104-4951-99c3-ff049c2f2070 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Lock "95ef4f91-a618-4ae2-95ad-d027c031f239" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.606s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1045.754887] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017761, 'name': CreateVM_Task} progress is 99%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.772715] env[63345]: DEBUG oslo_vmware.api [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Task: {'id': task-1017762, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.902543] env[63345]: DEBUG oslo_vmware.api [None req-98f2f512-c809-4bf9-a76a-91523a39ee46 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017754, 'name': ReconfigVM_Task} progress is 14%. 
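The network_info blob cached above for b3e0831b-b8f1-40c4-be01-71ed6484dbc0 nests the fixed address under network -> subnets -> ips. A small hypothetical helper that extracts fixed IPs from that structure, run here against a trimmed copy of the logged data:

    def fixed_ips(network_info):
        for vif in network_info:
            for subnet in vif.get("network", {}).get("subnets", []):
                for ip in subnet.get("ips", []):
                    if ip.get("type") == "fixed":
                        yield vif["id"], ip["address"]

    nw_info = [{
        "id": "a4151246-fc54-4f99-9110-a65de119bea6",
        "address": "fa:16:3e:0f:cc:58",
        "network": {"subnets": [{"cidr": "192.168.128.0/28",
                                 "ips": [{"address": "192.168.128.4", "type": "fixed"}]}]},
    }]
    print(list(fixed_ips(nw_info)))
    # [('a4151246-fc54-4f99-9110-a65de119bea6', '192.168.128.4')]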
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.134063] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-964b90c2-b015-4b26-8960-5d7a28bdf614 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1046.134063] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-97f53071-3c01-4acf-bb34-44f383aae9d8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.142018] env[63345]: DEBUG oslo_vmware.api [None req-964b90c2-b015-4b26-8960-5d7a28bdf614 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 1046.142018] env[63345]: value = "task-1017763" [ 1046.142018] env[63345]: _type = "Task" [ 1046.142018] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.148181] env[63345]: DEBUG oslo_vmware.api [None req-964b90c2-b015-4b26-8960-5d7a28bdf614 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017763, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.182135] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Releasing lock "refresh_cache-b3e0831b-b8f1-40c4-be01-71ed6484dbc0" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1046.182877] env[63345]: DEBUG nova.compute.manager [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: b3e0831b-b8f1-40c4-be01-71ed6484dbc0] Instance network_info: |[{"id": "a4151246-fc54-4f99-9110-a65de119bea6", "address": "fa:16:3e:0f:cc:58", "network": {"id": "d7581fd9-99cb-4847-b9da-a659a40e1d52", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1100696493-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c535ae9067ab4e8a87e95c68af4624fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f77ff7a1-209c-4f3f-b2a0-fd817741e739", "external-id": "nsx-vlan-transportzone-935", "segmentation_id": 935, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4151246-fc", "ovs_interfaceid": "a4151246-fc54-4f99-9110-a65de119bea6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 1046.183570] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None 
req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: b3e0831b-b8f1-40c4-be01-71ed6484dbc0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0f:cc:58', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f77ff7a1-209c-4f3f-b2a0-fd817741e739', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a4151246-fc54-4f99-9110-a65de119bea6', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1046.191630] env[63345]: DEBUG oslo.service.loopingcall [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1046.191630] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b3e0831b-b8f1-40c4-be01-71ed6484dbc0] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1046.192070] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-28f47c5d-28ca-44b6-9efd-905606677964 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.211517] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1046.211517] env[63345]: value = "task-1017764" [ 1046.211517] env[63345]: _type = "Task" [ 1046.211517] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.218885] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017764, 'name': CreateVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.254576] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017761, 'name': CreateVM_Task, 'duration_secs': 0.654679} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.254777] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1046.255444] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-73a5a717-9b1e-41c1-9c56-b077bccf504c tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-226146', 'volume_id': '20d77566-3841-4d48-8c1e-d94d3b3b3333', 'name': 'volume-20d77566-3841-4d48-8c1e-d94d3b3b3333', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'bce78147-6f6d-47a2-84f3-482f59a8bb8e', 'attached_at': '', 'detached_at': '', 'volume_id': '20d77566-3841-4d48-8c1e-d94d3b3b3333', 'serial': '20d77566-3841-4d48-8c1e-d94d3b3b3333'}, 'delete_on_termination': True, 'mount_device': '/dev/sda', 'device_type': None, 'boot_index': 0, 'disk_bus': None, 'guest_format': None, 'attachment_id': 'e0bfa2bd-844e-4580-96fb-30414a149211', 'volume_type': None}], 'swap': None} {{(pid=63345) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1046.255703] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-73a5a717-9b1e-41c1-9c56-b077bccf504c tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Root volume attach. Driver type: vmdk {{(pid=63345) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1046.256581] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38cafb2f-afce-4fcb-91f8-0eb602aca67d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.267747] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21a4b7ab-0c3a-4a4a-932b-4d5196180929 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.275286] env[63345]: DEBUG oslo_vmware.api [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Task: {'id': task-1017762, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.626357} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.276935] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore1] da3408a0-cce7-4252-be47-097f081d83c1/da3408a0-cce7-4252-be47-097f081d83c1.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1046.277219] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: da3408a0-cce7-4252-be47-097f081d83c1] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1046.277535] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6c8fd1de-2d02-4c17-9dca-f67f07c02156 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.279953] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63fb5731-1609-4784-9fba-0d946d839e08 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.289063] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-5cd838e7-b3c3-4265-bc0f-4b0d6724ad01 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.291850] env[63345]: DEBUG oslo_vmware.api [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Waiting for the task: (returnval){ [ 1046.291850] env[63345]: value = "task-1017765" [ 1046.291850] env[63345]: _type = "Task" [ 1046.291850] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.298067] env[63345]: DEBUG nova.compute.manager [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] [instance: 5cefe8a6-4af0-47d4-84f5-1d579d0c9968] Start spawning the instance on the hypervisor. {{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 1046.302586] env[63345]: DEBUG oslo_vmware.api [None req-73a5a717-9b1e-41c1-9c56-b077bccf504c tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 1046.302586] env[63345]: value = "task-1017766" [ 1046.302586] env[63345]: _type = "Task" [ 1046.302586] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.310265] env[63345]: DEBUG oslo_vmware.api [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Task: {'id': task-1017765, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.315527] env[63345]: DEBUG oslo_vmware.api [None req-73a5a717-9b1e-41c1-9c56-b077bccf504c tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017766, 'name': RelocateVM_Task} progress is 7%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.324183] env[63345]: DEBUG nova.compute.manager [req-30796f8a-4ec2-4327-a12f-b4014a0ec97a req-ce777622-6743-4532-9967-399e5bef9a83 service nova] [instance: 95ef4f91-a618-4ae2-95ad-d027c031f239] Received event network-changed-e65d10a2-1bac-4be4-846a-6fc94207c2b7 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1046.324441] env[63345]: DEBUG nova.compute.manager [req-30796f8a-4ec2-4327-a12f-b4014a0ec97a req-ce777622-6743-4532-9967-399e5bef9a83 service nova] [instance: 95ef4f91-a618-4ae2-95ad-d027c031f239] Refreshing instance network info cache due to event network-changed-e65d10a2-1bac-4be4-846a-6fc94207c2b7. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 1046.324730] env[63345]: DEBUG oslo_concurrency.lockutils [req-30796f8a-4ec2-4327-a12f-b4014a0ec97a req-ce777622-6743-4532-9967-399e5bef9a83 service nova] Acquiring lock "refresh_cache-95ef4f91-a618-4ae2-95ad-d027c031f239" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1046.324895] env[63345]: DEBUG oslo_concurrency.lockutils [req-30796f8a-4ec2-4327-a12f-b4014a0ec97a req-ce777622-6743-4532-9967-399e5bef9a83 service nova] Acquired lock "refresh_cache-95ef4f91-a618-4ae2-95ad-d027c031f239" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1046.325173] env[63345]: DEBUG nova.network.neutron [req-30796f8a-4ec2-4327-a12f-b4014a0ec97a req-ce777622-6743-4532-9967-399e5bef9a83 service nova] [instance: 95ef4f91-a618-4ae2-95ad-d027c031f239] Refreshing network info cache for port e65d10a2-1bac-4be4-846a-6fc94207c2b7 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1046.328971] env[63345]: DEBUG nova.virt.hardware [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1046.329244] env[63345]: DEBUG nova.virt.hardware [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Flavor limits 0:0:0 {{(pid=63345) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1046.329420] env[63345]: DEBUG nova.virt.hardware [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1046.329633] env[63345]: DEBUG nova.virt.hardware [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1046.329849] env[63345]: DEBUG nova.virt.hardware [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1046.330060] env[63345]: DEBUG nova.virt.hardware [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1046.330290] env[63345]: DEBUG nova.virt.hardware [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1046.330493] env[63345]: DEBUG nova.virt.hardware [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1046.330703] env[63345]: DEBUG nova.virt.hardware [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1046.330891] env[63345]: DEBUG nova.virt.hardware [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1046.331126] env[63345]: DEBUG nova.virt.hardware [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1046.332367] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62103982-95c9-46b5-bef8-508d392e1768 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.346395] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b940510-f26e-4786-8c27-ac5c7033cd06 {{(pid=63345) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.362157] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] [instance: 5cefe8a6-4af0-47d4-84f5-1d579d0c9968] Instance VIF info [] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1046.367828] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Creating folder: Project (c623f633687c4aee9b6933dea870b072). Parent ref: group-v225918. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1046.368237] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-688557a3-7a6b-48fe-a28e-f764a3f3aea4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.380125] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Created folder: Project (c623f633687c4aee9b6933dea870b072) in parent group-v225918. [ 1046.380341] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Creating folder: Instances. Parent ref: group-v226160. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1046.380591] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-25f943a0-3333-4112-bca7-fa295cd6dac1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.391321] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Created folder: Instances in parent group-v226160. [ 1046.391540] env[63345]: DEBUG oslo.service.loopingcall [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1046.391745] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5cefe8a6-4af0-47d4-84f5-1d579d0c9968] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1046.391972] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2b0f40d9-4d70-4693-8585-f4add167cdd1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.418810] env[63345]: DEBUG oslo_vmware.api [None req-98f2f512-c809-4bf9-a76a-91523a39ee46 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017754, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.422363] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1046.422363] env[63345]: value = "task-1017769" [ 1046.422363] env[63345]: _type = "Task" [ 1046.422363] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.430739] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017769, 'name': CreateVM_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.584196] env[63345]: DEBUG nova.compute.manager [req-6ad41b19-9243-4d58-b037-41377ee00700 req-72ab3a99-9806-4172-ad6a-4ff798c572ce service nova] [instance: b3e0831b-b8f1-40c4-be01-71ed6484dbc0] Received event network-changed-a4151246-fc54-4f99-9110-a65de119bea6 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1046.584425] env[63345]: DEBUG nova.compute.manager [req-6ad41b19-9243-4d58-b037-41377ee00700 req-72ab3a99-9806-4172-ad6a-4ff798c572ce service nova] [instance: b3e0831b-b8f1-40c4-be01-71ed6484dbc0] Refreshing instance network info cache due to event network-changed-a4151246-fc54-4f99-9110-a65de119bea6. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 1046.584675] env[63345]: DEBUG oslo_concurrency.lockutils [req-6ad41b19-9243-4d58-b037-41377ee00700 req-72ab3a99-9806-4172-ad6a-4ff798c572ce service nova] Acquiring lock "refresh_cache-b3e0831b-b8f1-40c4-be01-71ed6484dbc0" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1046.584847] env[63345]: DEBUG oslo_concurrency.lockutils [req-6ad41b19-9243-4d58-b037-41377ee00700 req-72ab3a99-9806-4172-ad6a-4ff798c572ce service nova] Acquired lock "refresh_cache-b3e0831b-b8f1-40c4-be01-71ed6484dbc0" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1046.585045] env[63345]: DEBUG nova.network.neutron [req-6ad41b19-9243-4d58-b037-41377ee00700 req-72ab3a99-9806-4172-ad6a-4ff798c572ce service nova] [instance: b3e0831b-b8f1-40c4-be01-71ed6484dbc0] Refreshing network info cache for port a4151246-fc54-4f99-9110-a65de119bea6 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1046.591320] env[63345]: DEBUG nova.network.neutron [req-30796f8a-4ec2-4327-a12f-b4014a0ec97a req-ce777622-6743-4532-9967-399e5bef9a83 service nova] [instance: 95ef4f91-a618-4ae2-95ad-d027c031f239] Updated VIF entry in instance network info cache for port e65d10a2-1bac-4be4-846a-6fc94207c2b7. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1046.591984] env[63345]: DEBUG nova.network.neutron [req-30796f8a-4ec2-4327-a12f-b4014a0ec97a req-ce777622-6743-4532-9967-399e5bef9a83 service nova] [instance: 95ef4f91-a618-4ae2-95ad-d027c031f239] Updating instance_info_cache with network_info: [{"id": "e65d10a2-1bac-4be4-846a-6fc94207c2b7", "address": "fa:16:3e:c5:a7:39", "network": {"id": "372a3368-2d7a-4380-b811-7ad477d85250", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-454648225-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.169", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "41afa63287424a549133615eb390bac7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b8af79a-31d5-4d78-93d7-3919aa1d9186", "external-id": "nsx-vlan-transportzone-324", "segmentation_id": 324, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape65d10a2-1b", "ovs_interfaceid": "e65d10a2-1bac-4be4-846a-6fc94207c2b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1046.649847] env[63345]: DEBUG oslo_vmware.api [None req-964b90c2-b015-4b26-8960-5d7a28bdf614 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017763, 'name': PowerOffVM_Task, 'duration_secs': 0.246562} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.650165] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-964b90c2-b015-4b26-8960-5d7a28bdf614 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1046.650381] env[63345]: DEBUG nova.compute.manager [None req-964b90c2-b015-4b26-8960-5d7a28bdf614 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1046.651475] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f90a9fe5-84a8-43c5-8c32-e79223023909 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.722781] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017764, 'name': CreateVM_Task, 'duration_secs': 0.417452} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.723125] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b3e0831b-b8f1-40c4-be01-71ed6484dbc0] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1046.723974] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1046.724315] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1046.724733] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1046.725094] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2efbb15d-c24a-4f8c-bb80-10999047334f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.734443] env[63345]: DEBUG oslo_vmware.api [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for the task: (returnval){ [ 1046.734443] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]524aeafc-ad86-8791-2831-115473386340" [ 1046.734443] env[63345]: _type = "Task" [ 1046.734443] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.746798] env[63345]: DEBUG oslo_vmware.api [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]524aeafc-ad86-8791-2831-115473386340, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.802335] env[63345]: DEBUG oslo_vmware.api [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Task: {'id': task-1017765, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078164} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.802704] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: da3408a0-cce7-4252-be47-097f081d83c1] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1046.803584] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54cb5b43-8722-424b-98ac-7d4567cd830b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.815571] env[63345]: DEBUG oslo_vmware.api [None req-73a5a717-9b1e-41c1-9c56-b077bccf504c tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017766, 'name': RelocateVM_Task} progress is 20%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.831576] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: da3408a0-cce7-4252-be47-097f081d83c1] Reconfiguring VM instance instance-00000069 to attach disk [datastore1] da3408a0-cce7-4252-be47-097f081d83c1/da3408a0-cce7-4252-be47-097f081d83c1.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1046.832285] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5d9f9bec-f53e-4cbd-b194-0ee61e111d80 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.853658] env[63345]: DEBUG oslo_vmware.api [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Waiting for the task: (returnval){ [ 1046.853658] env[63345]: value = "task-1017770" [ 1046.853658] env[63345]: _type = "Task" [ 1046.853658] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.863897] env[63345]: DEBUG oslo_vmware.api [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Task: {'id': task-1017770, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.919009] env[63345]: DEBUG oslo_vmware.api [None req-98f2f512-c809-4bf9-a76a-91523a39ee46 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017754, 'name': ReconfigVM_Task} progress is 18%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.933086] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017769, 'name': CreateVM_Task, 'duration_secs': 0.393561} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.933301] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5cefe8a6-4af0-47d4-84f5-1d579d0c9968] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1046.933792] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1046.933913] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1046.934300] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1046.934598] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9a71b3c2-16b6-45db-beab-6ae4011718e7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.941024] env[63345]: DEBUG oslo_vmware.api [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Waiting for the task: (returnval){ [ 1046.941024] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52b29005-ca69-ebf9-298b-e8cbf0a1e54b" [ 1046.941024] env[63345]: _type = "Task" [ 1046.941024] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.952516] env[63345]: DEBUG oslo_vmware.api [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52b29005-ca69-ebf9-298b-e8cbf0a1e54b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.095457] env[63345]: DEBUG oslo_concurrency.lockutils [req-30796f8a-4ec2-4327-a12f-b4014a0ec97a req-ce777622-6743-4532-9967-399e5bef9a83 service nova] Releasing lock "refresh_cache-95ef4f91-a618-4ae2-95ad-d027c031f239" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1047.164101] env[63345]: DEBUG oslo_concurrency.lockutils [None req-964b90c2-b015-4b26-8960-5d7a28bdf614 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lock "726332dd-8699-49a4-a9ea-b9cbfc159855" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.050s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1047.245957] env[63345]: DEBUG oslo_vmware.api [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]524aeafc-ad86-8791-2831-115473386340, 'name': SearchDatastore_Task, 'duration_secs': 0.014172} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.246348] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1047.246568] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: b3e0831b-b8f1-40c4-be01-71ed6484dbc0] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1047.246859] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1047.247168] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1047.247254] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1047.247488] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8db401fd-71ee-4970-a529-25e01c0ca4ed {{(pid=63345) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.259413] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1047.259603] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1047.260377] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ff964f32-7b63-464d-a9b0-1e1609d1c5f5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.267159] env[63345]: DEBUG oslo_vmware.api [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for the task: (returnval){ [ 1047.267159] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52a14647-458d-e6fc-5bac-78fb24bb41b4" [ 1047.267159] env[63345]: _type = "Task" [ 1047.267159] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.275396] env[63345]: DEBUG oslo_vmware.api [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52a14647-458d-e6fc-5bac-78fb24bb41b4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.291695] env[63345]: DEBUG nova.network.neutron [req-6ad41b19-9243-4d58-b037-41377ee00700 req-72ab3a99-9806-4172-ad6a-4ff798c572ce service nova] [instance: b3e0831b-b8f1-40c4-be01-71ed6484dbc0] Updated VIF entry in instance network info cache for port a4151246-fc54-4f99-9110-a65de119bea6. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1047.292092] env[63345]: DEBUG nova.network.neutron [req-6ad41b19-9243-4d58-b037-41377ee00700 req-72ab3a99-9806-4172-ad6a-4ff798c572ce service nova] [instance: b3e0831b-b8f1-40c4-be01-71ed6484dbc0] Updating instance_info_cache with network_info: [{"id": "a4151246-fc54-4f99-9110-a65de119bea6", "address": "fa:16:3e:0f:cc:58", "network": {"id": "d7581fd9-99cb-4847-b9da-a659a40e1d52", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1100696493-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c535ae9067ab4e8a87e95c68af4624fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f77ff7a1-209c-4f3f-b2a0-fd817741e739", "external-id": "nsx-vlan-transportzone-935", "segmentation_id": 935, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4151246-fc", "ovs_interfaceid": "a4151246-fc54-4f99-9110-a65de119bea6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1047.317983] env[63345]: DEBUG oslo_vmware.api [None req-73a5a717-9b1e-41c1-9c56-b077bccf504c tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017766, 'name': RelocateVM_Task, 'duration_secs': 0.719296} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.318833] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-73a5a717-9b1e-41c1-9c56-b077bccf504c tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Volume attach. 
Driver type: vmdk {{(pid=63345) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1047.318833] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-73a5a717-9b1e-41c1-9c56-b077bccf504c tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-226146', 'volume_id': '20d77566-3841-4d48-8c1e-d94d3b3b3333', 'name': 'volume-20d77566-3841-4d48-8c1e-d94d3b3b3333', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'bce78147-6f6d-47a2-84f3-482f59a8bb8e', 'attached_at': '', 'detached_at': '', 'volume_id': '20d77566-3841-4d48-8c1e-d94d3b3b3333', 'serial': '20d77566-3841-4d48-8c1e-d94d3b3b3333'} {{(pid=63345) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1047.319675] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-253a87ab-b5ed-4259-aa28-79de0ab2e57f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.337617] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c57684d-0616-4629-8c47-bf8ecaaba2c9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.360455] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-73a5a717-9b1e-41c1-9c56-b077bccf504c tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Reconfiguring VM instance instance-0000006b to attach disk [datastore1] volume-20d77566-3841-4d48-8c1e-d94d3b3b3333/volume-20d77566-3841-4d48-8c1e-d94d3b3b3333.vmdk or device None with type thin {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1047.360899] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-474a6969-231d-45dc-81ee-5cb68eab3aa1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.386541] env[63345]: DEBUG oslo_vmware.api [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Task: {'id': task-1017770, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.388478] env[63345]: DEBUG oslo_vmware.api [None req-73a5a717-9b1e-41c1-9c56-b077bccf504c tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 1047.388478] env[63345]: value = "task-1017771" [ 1047.388478] env[63345]: _type = "Task" [ 1047.388478] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.396830] env[63345]: DEBUG oslo_vmware.api [None req-73a5a717-9b1e-41c1-9c56-b077bccf504c tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017771, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.418511] env[63345]: DEBUG oslo_vmware.api [None req-98f2f512-c809-4bf9-a76a-91523a39ee46 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017754, 'name': ReconfigVM_Task, 'duration_secs': 6.002487} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.418791] env[63345]: DEBUG oslo_concurrency.lockutils [None req-98f2f512-c809-4bf9-a76a-91523a39ee46 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Releasing lock "7057cdfc-a6d9-4e52-b650-6a5709d5f8c2" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1047.419047] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-98f2f512-c809-4bf9-a76a-91523a39ee46 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Reconfigured VM to detach interface {{(pid=63345) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1984}} [ 1047.453242] env[63345]: DEBUG oslo_vmware.api [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52b29005-ca69-ebf9-298b-e8cbf0a1e54b, 'name': SearchDatastore_Task, 'duration_secs': 0.012039} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.454903] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1047.455170] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] [instance: 5cefe8a6-4af0-47d4-84f5-1d579d0c9968] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1047.455411] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1047.455566] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1047.455750] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 
tempest-ServerShowV257Test-942916775-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1047.456040] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1f39cc76-da04-4b7d-bf7c-618b9aeca2ee {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.466657] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1047.466858] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1047.467603] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9787333b-7118-46f9-9dc0-042a4b7263af {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.473296] env[63345]: DEBUG oslo_vmware.api [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Waiting for the task: (returnval){ [ 1047.473296] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52adfb07-e970-03d9-4b54-712039d55166" [ 1047.473296] env[63345]: _type = "Task" [ 1047.473296] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.481546] env[63345]: DEBUG oslo_vmware.api [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52adfb07-e970-03d9-4b54-712039d55166, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.781819] env[63345]: DEBUG oslo_vmware.api [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52a14647-458d-e6fc-5bac-78fb24bb41b4, 'name': SearchDatastore_Task, 'duration_secs': 0.046208} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.787467] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-72c834a1-91e4-4860-a99d-fc1e47cbd27d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.795148] env[63345]: DEBUG oslo_concurrency.lockutils [req-6ad41b19-9243-4d58-b037-41377ee00700 req-72ab3a99-9806-4172-ad6a-4ff798c572ce service nova] Releasing lock "refresh_cache-b3e0831b-b8f1-40c4-be01-71ed6484dbc0" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1047.795430] env[63345]: DEBUG oslo_vmware.api [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for the task: (returnval){ [ 1047.795430] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]521ebf33-87ee-0f9d-ecd6-3cabfc399fac" [ 1047.795430] env[63345]: _type = "Task" [ 1047.795430] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.804801] env[63345]: DEBUG oslo_vmware.api [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]521ebf33-87ee-0f9d-ecd6-3cabfc399fac, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.871410] env[63345]: DEBUG oslo_vmware.api [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Task: {'id': task-1017770, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.897889] env[63345]: DEBUG oslo_vmware.api [None req-73a5a717-9b1e-41c1-9c56-b077bccf504c tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017771, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.984587] env[63345]: DEBUG oslo_vmware.api [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52adfb07-e970-03d9-4b54-712039d55166, 'name': SearchDatastore_Task, 'duration_secs': 0.021681} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.985374] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dc4b6a59-1326-4425-b1ec-0ce2a41430b1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.991238] env[63345]: DEBUG oslo_vmware.api [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Waiting for the task: (returnval){ [ 1047.991238] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]527af7ac-374e-83bc-7c8c-efea61361473" [ 1047.991238] env[63345]: _type = "Task" [ 1047.991238] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.998961] env[63345]: DEBUG oslo_vmware.api [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]527af7ac-374e-83bc-7c8c-efea61361473, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.296205] env[63345]: DEBUG nova.compute.manager [None req-4e036ee5-bb70-4754-a5fc-c53eb54712f9 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Stashing vm_state: stopped {{(pid=63345) _prep_resize /opt/stack/nova/nova/compute/manager.py:5953}} [ 1048.308768] env[63345]: DEBUG oslo_vmware.api [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]521ebf33-87ee-0f9d-ecd6-3cabfc399fac, 'name': SearchDatastore_Task, 'duration_secs': 0.036358} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.309047] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1048.309308] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore1] b3e0831b-b8f1-40c4-be01-71ed6484dbc0/b3e0831b-b8f1-40c4-be01-71ed6484dbc0.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1048.309682] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e9569b1d-3cc3-4e22-9a8e-2781ba23d8b1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.320797] env[63345]: DEBUG oslo_vmware.api [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for the task: (returnval){ [ 1048.320797] env[63345]: value = "task-1017772" [ 1048.320797] env[63345]: _type = "Task" [ 1048.320797] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.329189] env[63345]: DEBUG oslo_vmware.api [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017772, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.371074] env[63345]: DEBUG oslo_vmware.api [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Task: {'id': task-1017770, 'name': ReconfigVM_Task, 'duration_secs': 1.34764} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.371409] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: da3408a0-cce7-4252-be47-097f081d83c1] Reconfigured VM instance instance-00000069 to attach disk [datastore1] da3408a0-cce7-4252-be47-097f081d83c1/da3408a0-cce7-4252-be47-097f081d83c1.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1048.372167] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-87f50fc6-9744-401c-9825-8ba12a56ca7c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.379736] env[63345]: DEBUG oslo_vmware.api [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Waiting for the task: (returnval){ [ 1048.379736] env[63345]: value = "task-1017773" [ 1048.379736] env[63345]: _type = "Task" [ 1048.379736] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.389058] env[63345]: DEBUG oslo_vmware.api [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Task: {'id': task-1017773, 'name': Rename_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.397861] env[63345]: DEBUG oslo_vmware.api [None req-73a5a717-9b1e-41c1-9c56-b077bccf504c tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017771, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.502400] env[63345]: DEBUG oslo_vmware.api [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]527af7ac-374e-83bc-7c8c-efea61361473, 'name': SearchDatastore_Task, 'duration_secs': 0.010876} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.502790] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1048.503081] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 5cefe8a6-4af0-47d4-84f5-1d579d0c9968/5cefe8a6-4af0-47d4-84f5-1d579d0c9968.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1048.503367] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-00d2aaed-fe05-4b47-ba6d-66de546bc208 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.511214] env[63345]: DEBUG oslo_vmware.api [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Waiting for the task: (returnval){ [ 1048.511214] env[63345]: value = "task-1017774" [ 1048.511214] env[63345]: _type = "Task" [ 1048.511214] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.522136] env[63345]: DEBUG oslo_vmware.api [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Task: {'id': task-1017774, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.722819] env[63345]: DEBUG oslo_concurrency.lockutils [None req-98f2f512-c809-4bf9-a76a-91523a39ee46 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquiring lock "refresh_cache-7057cdfc-a6d9-4e52-b650-6a5709d5f8c2" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1048.723064] env[63345]: DEBUG oslo_concurrency.lockutils [None req-98f2f512-c809-4bf9-a76a-91523a39ee46 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquired lock "refresh_cache-7057cdfc-a6d9-4e52-b650-6a5709d5f8c2" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1048.723270] env[63345]: DEBUG nova.network.neutron [None req-98f2f512-c809-4bf9-a76a-91523a39ee46 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1048.826156] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4e036ee5-bb70-4754-a5fc-c53eb54712f9 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1048.826460] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4e036ee5-bb70-4754-a5fc-c53eb54712f9 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1048.834045] env[63345]: DEBUG oslo_vmware.api [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017772, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.495766} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.834210] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore1] b3e0831b-b8f1-40c4-be01-71ed6484dbc0/b3e0831b-b8f1-40c4-be01-71ed6484dbc0.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1048.834313] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: b3e0831b-b8f1-40c4-be01-71ed6484dbc0] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1048.834561] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-19535acb-054a-45b3-823d-be0b39ee79b9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.843315] env[63345]: DEBUG oslo_vmware.api [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for the task: (returnval){ [ 1048.843315] env[63345]: value = "task-1017775" [ 1048.843315] env[63345]: _type = "Task" [ 1048.843315] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.854397] env[63345]: DEBUG oslo_vmware.api [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017775, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.891135] env[63345]: DEBUG oslo_vmware.api [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Task: {'id': task-1017773, 'name': Rename_Task, 'duration_secs': 0.148023} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.894777] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: da3408a0-cce7-4252-be47-097f081d83c1] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1048.895196] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-60ab640c-7c54-48aa-90c0-a9909300c669 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.903803] env[63345]: DEBUG oslo_vmware.api [None req-73a5a717-9b1e-41c1-9c56-b077bccf504c tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017771, 'name': ReconfigVM_Task, 'duration_secs': 1.044337} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.905398] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-73a5a717-9b1e-41c1-9c56-b077bccf504c tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Reconfigured VM instance instance-0000006b to attach disk [datastore1] volume-20d77566-3841-4d48-8c1e-d94d3b3b3333/volume-20d77566-3841-4d48-8c1e-d94d3b3b3333.vmdk or device None with type thin {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1048.910618] env[63345]: DEBUG oslo_vmware.api [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Waiting for the task: (returnval){ [ 1048.910618] env[63345]: value = "task-1017776" [ 1048.910618] env[63345]: _type = "Task" [ 1048.910618] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.911275] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-64596229-57a0-4560-a6ec-7d6a87ba4ff2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.933161] env[63345]: DEBUG oslo_vmware.api [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Task: {'id': task-1017776, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.935380] env[63345]: DEBUG oslo_vmware.api [None req-73a5a717-9b1e-41c1-9c56-b077bccf504c tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 1048.935380] env[63345]: value = "task-1017777" [ 1048.935380] env[63345]: _type = "Task" [ 1048.935380] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.946934] env[63345]: DEBUG oslo_vmware.api [None req-73a5a717-9b1e-41c1-9c56-b077bccf504c tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017777, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.022852] env[63345]: DEBUG oslo_vmware.api [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Task: {'id': task-1017774, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.270735] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7c20033e-3753-4fd1-b8f7-3a44f2382dbd tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquiring lock "7057cdfc-a6d9-4e52-b650-6a5709d5f8c2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1049.270998] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7c20033e-3753-4fd1-b8f7-3a44f2382dbd tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Lock "7057cdfc-a6d9-4e52-b650-6a5709d5f8c2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1049.271230] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7c20033e-3753-4fd1-b8f7-3a44f2382dbd tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquiring lock "7057cdfc-a6d9-4e52-b650-6a5709d5f8c2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1049.271426] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7c20033e-3753-4fd1-b8f7-3a44f2382dbd tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Lock "7057cdfc-a6d9-4e52-b650-6a5709d5f8c2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1049.271604] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7c20033e-3753-4fd1-b8f7-3a44f2382dbd tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Lock "7057cdfc-a6d9-4e52-b650-6a5709d5f8c2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1049.273897] env[63345]: INFO nova.compute.manager [None req-7c20033e-3753-4fd1-b8f7-3a44f2382dbd tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Terminating instance [ 1049.331445] env[63345]: INFO nova.compute.claims [None req-4e036ee5-bb70-4754-a5fc-c53eb54712f9 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1049.353370] env[63345]: DEBUG oslo_vmware.api [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017775, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.123366} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.353631] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: b3e0831b-b8f1-40c4-be01-71ed6484dbc0] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1049.354460] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dff982be-e47d-42df-ad69-e423cfe8f609 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.379261] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: b3e0831b-b8f1-40c4-be01-71ed6484dbc0] Reconfiguring VM instance instance-0000006c to attach disk [datastore1] b3e0831b-b8f1-40c4-be01-71ed6484dbc0/b3e0831b-b8f1-40c4-be01-71ed6484dbc0.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1049.382105] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6a431ae8-fe9c-429c-b9c5-36a0d00d6ebf {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.403940] env[63345]: DEBUG oslo_vmware.api [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for the task: (returnval){ [ 1049.403940] env[63345]: value = "task-1017778" [ 1049.403940] env[63345]: _type = "Task" [ 1049.403940] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.414251] env[63345]: DEBUG oslo_vmware.api [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017778, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.432542] env[63345]: DEBUG oslo_vmware.api [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Task: {'id': task-1017776, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.445574] env[63345]: DEBUG oslo_vmware.api [None req-73a5a717-9b1e-41c1-9c56-b077bccf504c tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017777, 'name': ReconfigVM_Task, 'duration_secs': 0.242251} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.445906] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-73a5a717-9b1e-41c1-9c56-b077bccf504c tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-226146', 'volume_id': '20d77566-3841-4d48-8c1e-d94d3b3b3333', 'name': 'volume-20d77566-3841-4d48-8c1e-d94d3b3b3333', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'bce78147-6f6d-47a2-84f3-482f59a8bb8e', 'attached_at': '', 'detached_at': '', 'volume_id': '20d77566-3841-4d48-8c1e-d94d3b3b3333', 'serial': '20d77566-3841-4d48-8c1e-d94d3b3b3333'} {{(pid=63345) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1049.446494] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2aac7535-31d2-4113-a5ec-b656bda2e541 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.454586] env[63345]: DEBUG oslo_vmware.api [None req-73a5a717-9b1e-41c1-9c56-b077bccf504c tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 1049.454586] env[63345]: value = "task-1017779" [ 1049.454586] env[63345]: _type = "Task" [ 1049.454586] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.462929] env[63345]: DEBUG oslo_vmware.api [None req-73a5a717-9b1e-41c1-9c56-b077bccf504c tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017779, 'name': Rename_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.471437] env[63345]: INFO nova.network.neutron [None req-98f2f512-c809-4bf9-a76a-91523a39ee46 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Port f9c53757-0ec3-4d99-9493-d12a48f28db3 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
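Most records in this stretch follow the same oslo.vmware task lifecycle: a vSphere *_Task method is invoked, wait_for_task blocks while _poll_task logs "progress is N%", and the closing record adds duration_secs once the task completes successfully. The sketch below is a minimal, stdlib-only approximation of that polling loop; fetch_task_info and TaskInfo are hypothetical stand-ins for the vSphere TaskInfo lookup that oslo.vmware performs internally, not its real API.

    # Hypothetical sketch of the polling behind the "Waiting for the task",
    # "progress is N%" and "completed successfully" records above.
    import time
    from dataclasses import dataclass

    @dataclass
    class TaskInfo:
        state: str               # "running", "success" or "error"
        progress: int = 0        # percent complete while running
        error: str | None = None

    def wait_for_task(task_id, fetch_task_info, poll_interval=0.5):
        # Poll until the task leaves the "running" state.
        started = time.monotonic()
        while True:
            info = fetch_task_info(task_id)   # hypothetical TaskInfo lookup
            if info.state == "running":
                print(f"Task: {task_id} progress is {info.progress}%.")
                time.sleep(poll_interval)
                continue
            duration = time.monotonic() - started
            if info.state == "success":
                print(f"Task: {task_id} completed successfully "
                      f"(duration_secs={duration:.6f}).")
                return duration
            raise RuntimeError(f"Task {task_id} failed: {info.error}")

The caller supplies fetch_task_info (e.g. a closure over a session object), so the loop itself stays independent of any particular vSphere client.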
[ 1049.471928] env[63345]: DEBUG nova.network.neutron [None req-98f2f512-c809-4bf9-a76a-91523a39ee46 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Updating instance_info_cache with network_info: [{"id": "d6e5e759-86e1-4f76-9b65-19b2691780df", "address": "fa:16:3e:21:49:6e", "network": {"id": "b360ab0d-3deb-4632-a8d5-c1639db9e9e2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2015660260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.225", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33c28bfca4da460e8ca96dc7519204c8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f35e69ef-c2c8-4b8c-9887-33e97b242c0a", "external-id": "nsx-vlan-transportzone-969", "segmentation_id": 969, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd6e5e759-86", "ovs_interfaceid": "d6e5e759-86e1-4f76-9b65-19b2691780df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1049.521953] env[63345]: DEBUG oslo_vmware.api [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Task: {'id': task-1017774, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.659127} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.522236] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 5cefe8a6-4af0-47d4-84f5-1d579d0c9968/5cefe8a6-4af0-47d4-84f5-1d579d0c9968.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1049.522455] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] [instance: 5cefe8a6-4af0-47d4-84f5-1d579d0c9968] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1049.522718] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b038c8c7-3058-446c-9172-fca573fba313 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.528779] env[63345]: DEBUG oslo_vmware.api [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Waiting for the task: (returnval){ [ 1049.528779] env[63345]: value = "task-1017780" [ 1049.528779] env[63345]: _type = "Task" [ 1049.528779] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.536884] env[63345]: DEBUG oslo_vmware.api [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Task: {'id': task-1017780, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.777586] env[63345]: DEBUG nova.compute.manager [None req-7c20033e-3753-4fd1-b8f7-3a44f2382dbd tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Start destroying the instance on the hypervisor. 
{{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 1049.777760] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-7c20033e-3753-4fd1-b8f7-3a44f2382dbd tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1049.778667] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41cc9fba-720d-4503-817e-4cfbd407df19 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.787180] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c20033e-3753-4fd1-b8f7-3a44f2382dbd tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1049.787409] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6af7a157-02c4-4077-ae8f-dd52b77786e3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.793925] env[63345]: DEBUG oslo_vmware.api [None req-7c20033e-3753-4fd1-b8f7-3a44f2382dbd tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Waiting for the task: (returnval){ [ 1049.793925] env[63345]: value = "task-1017781" [ 1049.793925] env[63345]: _type = "Task" [ 1049.793925] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.801905] env[63345]: DEBUG oslo_vmware.api [None req-7c20033e-3753-4fd1-b8f7-3a44f2382dbd tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017781, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.837210] env[63345]: INFO nova.compute.resource_tracker [None req-4e036ee5-bb70-4754-a5fc-c53eb54712f9 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Updating resource usage from migration 6995199b-b85e-43ee-9f2c-baf2244d91a8 [ 1049.914182] env[63345]: DEBUG oslo_vmware.api [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017778, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.935170] env[63345]: DEBUG oslo_vmware.api [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Task: {'id': task-1017776, 'name': PowerOnVM_Task, 'duration_secs': 0.550096} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.935170] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: da3408a0-cce7-4252-be47-097f081d83c1] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1049.935248] env[63345]: DEBUG nova.compute.manager [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: da3408a0-cce7-4252-be47-097f081d83c1] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1049.936046] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac8b44d1-c37b-4b10-b540-ef73d1136d9e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.966596] env[63345]: DEBUG oslo_vmware.api [None req-73a5a717-9b1e-41c1-9c56-b077bccf504c tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017779, 'name': Rename_Task, 'duration_secs': 0.473443} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.966888] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-73a5a717-9b1e-41c1-9c56-b077bccf504c tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1049.967168] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b8df3643-3d7d-4fa4-affc-85d738d93017 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.975272] env[63345]: DEBUG oslo_concurrency.lockutils [None req-98f2f512-c809-4bf9-a76a-91523a39ee46 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Releasing lock "refresh_cache-7057cdfc-a6d9-4e52-b650-6a5709d5f8c2" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1049.978822] env[63345]: DEBUG oslo_vmware.api [None req-73a5a717-9b1e-41c1-9c56-b077bccf504c tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 1049.978822] env[63345]: value = "task-1017782" [ 1049.978822] env[63345]: _type = "Task" [ 1049.978822] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.988812] env[63345]: DEBUG oslo_vmware.api [None req-73a5a717-9b1e-41c1-9c56-b077bccf504c tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017782, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.035784] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6ea56f2-76a8-42f6-8b67-117214858787 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.043751] env[63345]: DEBUG oslo_vmware.api [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Task: {'id': task-1017780, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068912} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.045588] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] [instance: 5cefe8a6-4af0-47d4-84f5-1d579d0c9968] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1050.046376] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba2ea0d5-e498-432d-878c-74dcf1ce15d6 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.049386] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7838a72a-354c-427c-9f8a-25b319817e3e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.071099] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] [instance: 5cefe8a6-4af0-47d4-84f5-1d579d0c9968] Reconfiguring VM instance instance-0000006d to attach disk [datastore2] 5cefe8a6-4af0-47d4-84f5-1d579d0c9968/5cefe8a6-4af0-47d4-84f5-1d579d0c9968.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1050.096061] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9af7b389-df63-4160-b2c1-4d7a0d5c128b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.110924] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-627259ef-b39b-4aff-8f81-f4caec5da590 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.121212] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4868e64-d2aa-4a5f-81dc-57de10bca248 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.125231] env[63345]: DEBUG oslo_vmware.api [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Waiting for the task: (returnval){ [ 1050.125231] env[63345]: value = "task-1017783" [ 1050.125231] env[63345]: _type = "Task" [ 1050.125231] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.136008] env[63345]: DEBUG nova.compute.provider_tree [None req-4e036ee5-bb70-4754-a5fc-c53eb54712f9 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1050.142305] env[63345]: DEBUG oslo_vmware.api [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Task: {'id': task-1017783, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.305096] env[63345]: DEBUG oslo_vmware.api [None req-7c20033e-3753-4fd1-b8f7-3a44f2382dbd tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017781, 'name': PowerOffVM_Task, 'duration_secs': 0.486569} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.305311] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c20033e-3753-4fd1-b8f7-3a44f2382dbd tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1050.305492] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-7c20033e-3753-4fd1-b8f7-3a44f2382dbd tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1050.305749] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c9c8fd51-d726-4178-aa9f-bd4d22839aab {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.384694] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-7c20033e-3753-4fd1-b8f7-3a44f2382dbd tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1050.385103] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-7c20033e-3753-4fd1-b8f7-3a44f2382dbd tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1050.385358] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c20033e-3753-4fd1-b8f7-3a44f2382dbd tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Deleting the datastore file [datastore2] 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2 {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1050.385901] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b5f23529-9cb6-4cc3-a77d-a36245071e95 {{(pid=63345) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.393434] env[63345]: DEBUG oslo_vmware.api [None req-7c20033e-3753-4fd1-b8f7-3a44f2382dbd tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Waiting for the task: (returnval){ [ 1050.393434] env[63345]: value = "task-1017785" [ 1050.393434] env[63345]: _type = "Task" [ 1050.393434] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.405221] env[63345]: DEBUG oslo_vmware.api [None req-7c20033e-3753-4fd1-b8f7-3a44f2382dbd tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017785, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.414847] env[63345]: DEBUG oslo_vmware.api [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017778, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.454934] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1050.482082] env[63345]: DEBUG oslo_concurrency.lockutils [None req-98f2f512-c809-4bf9-a76a-91523a39ee46 tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Lock "interface-7057cdfc-a6d9-4e52-b650-6a5709d5f8c2-f9c53757-0ec3-4d99-9493-d12a48f28db3" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.662s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1050.492393] env[63345]: DEBUG oslo_vmware.api [None req-73a5a717-9b1e-41c1-9c56-b077bccf504c tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017782, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.636284] env[63345]: DEBUG oslo_vmware.api [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Task: {'id': task-1017783, 'name': ReconfigVM_Task, 'duration_secs': 0.424522} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.636581] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] [instance: 5cefe8a6-4af0-47d4-84f5-1d579d0c9968] Reconfigured VM instance instance-0000006d to attach disk [datastore2] 5cefe8a6-4af0-47d4-84f5-1d579d0c9968/5cefe8a6-4af0-47d4-84f5-1d579d0c9968.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1050.637229] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5196e921-6f5d-408f-ae39-4ffdd5bd81b1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.639424] env[63345]: DEBUG nova.scheduler.client.report [None req-4e036ee5-bb70-4754-a5fc-c53eb54712f9 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1050.650582] env[63345]: DEBUG oslo_vmware.api [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Waiting for the task: (returnval){ [ 1050.650582] env[63345]: value = "task-1017786" [ 1050.650582] env[63345]: _type = "Task" [ 1050.650582] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.659719] env[63345]: DEBUG oslo_vmware.api [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Task: {'id': task-1017786, 'name': Rename_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.903851] env[63345]: DEBUG oslo_vmware.api [None req-7c20033e-3753-4fd1-b8f7-3a44f2382dbd tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017785, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.285266} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.904110] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c20033e-3753-4fd1-b8f7-3a44f2382dbd tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1050.904318] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-7c20033e-3753-4fd1-b8f7-3a44f2382dbd tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1050.904511] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-7c20033e-3753-4fd1-b8f7-3a44f2382dbd tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1050.904710] env[63345]: INFO nova.compute.manager [None req-7c20033e-3753-4fd1-b8f7-3a44f2382dbd tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1050.904991] env[63345]: DEBUG oslo.service.loopingcall [None req-7c20033e-3753-4fd1-b8f7-3a44f2382dbd tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1050.905297] env[63345]: DEBUG nova.compute.manager [-] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 1050.905410] env[63345]: DEBUG nova.network.neutron [-] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1050.915763] env[63345]: DEBUG oslo_vmware.api [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017778, 'name': ReconfigVM_Task, 'duration_secs': 1.050933} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.916059] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: b3e0831b-b8f1-40c4-be01-71ed6484dbc0] Reconfigured VM instance instance-0000006c to attach disk [datastore1] b3e0831b-b8f1-40c4-be01-71ed6484dbc0/b3e0831b-b8f1-40c4-be01-71ed6484dbc0.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1050.916691] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-16f48ee1-d37b-4197-918d-ebbab78d42d0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.924487] env[63345]: DEBUG oslo_vmware.api [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for the task: (returnval){ [ 1050.924487] env[63345]: value = "task-1017787" [ 1050.924487] env[63345]: _type = "Task" [ 1050.924487] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.934153] env[63345]: DEBUG oslo_vmware.api [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017787, 'name': Rename_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.991770] env[63345]: DEBUG oslo_vmware.api [None req-73a5a717-9b1e-41c1-9c56-b077bccf504c tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017782, 'name': PowerOnVM_Task, 'duration_secs': 0.794919} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.992074] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-73a5a717-9b1e-41c1-9c56-b077bccf504c tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1050.992293] env[63345]: INFO nova.compute.manager [None req-73a5a717-9b1e-41c1-9c56-b077bccf504c tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Took 7.77 seconds to spawn the instance on the hypervisor. 
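The scheduler report at [ 1050.639424] lists the provider inventory (VCPU, MEMORY_MB, DISK_GB, each with total, reserved and allocation_ratio). As a quick worked example of what those figures imply, the snippet below applies the usual placement capacity formula, effective = (total - reserved) * allocation_ratio; the dictionary literal simply restates the values from that record.

    # Capacity implied by the inventory record above (standard placement formula).
    inventory = {
        "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0},
    }

    for rc, inv in inventory.items():
        effective = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        print(f"{rc}: {effective:g} allocatable units")
    # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400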
[ 1050.992481] env[63345]: DEBUG nova.compute.manager [None req-73a5a717-9b1e-41c1-9c56-b077bccf504c tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1050.993316] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3685c0d7-c5ba-4660-a2da-d6c798cbb22b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.147019] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4e036ee5-bb70-4754-a5fc-c53eb54712f9 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.318s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1051.147019] env[63345]: INFO nova.compute.manager [None req-4e036ee5-bb70-4754-a5fc-c53eb54712f9 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Migrating [ 1051.152347] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.698s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1051.152543] env[63345]: DEBUG nova.objects.instance [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: da3408a0-cce7-4252-be47-097f081d83c1] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63345) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1051.172434] env[63345]: DEBUG oslo_vmware.api [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Task: {'id': task-1017786, 'name': Rename_Task, 'duration_secs': 0.329454} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.174993] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] [instance: 5cefe8a6-4af0-47d4-84f5-1d579d0c9968] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1051.175348] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d6af357e-f38f-4eb8-aa03-519ab9349067 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.185511] env[63345]: DEBUG oslo_vmware.api [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Waiting for the task: (returnval){ [ 1051.185511] env[63345]: value = "task-1017788" [ 1051.185511] env[63345]: _type = "Task" [ 1051.185511] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.195023] env[63345]: DEBUG oslo_vmware.api [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Task: {'id': task-1017788, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.440130] env[63345]: DEBUG oslo_vmware.api [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017787, 'name': Rename_Task, 'duration_secs': 0.329357} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.440130] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: b3e0831b-b8f1-40c4-be01-71ed6484dbc0] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1051.440130] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-74a5b9f4-d74c-4e56-b28c-05360a253020 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.447206] env[63345]: DEBUG oslo_vmware.api [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for the task: (returnval){ [ 1051.447206] env[63345]: value = "task-1017789" [ 1051.447206] env[63345]: _type = "Task" [ 1051.447206] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.455780] env[63345]: DEBUG oslo_vmware.api [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017789, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.518734] env[63345]: INFO nova.compute.manager [None req-73a5a717-9b1e-41c1-9c56-b077bccf504c tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Took 20.17 seconds to build instance. 
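The lockutils records in this section (for example the compute_resources claim that waited 0.000s and was held 2.318s) all report how long a caller waited to acquire a named lock and how long it was then held. Below is a rough stdlib sketch of that waited/held bookkeeping using threading.Lock and a context manager; it only mimics the log format and is not oslo.concurrency's actual implementation.

    # Stdlib approximation of the "waited N s" / "held N s" lock records above.
    import contextlib
    import threading
    import time

    _locks: dict[str, threading.Lock] = {}
    _registry_guard = threading.Lock()

    @contextlib.contextmanager
    def timed_lock(name, owner):
        with _registry_guard:
            lock = _locks.setdefault(name, threading.Lock())
        print(f'Acquiring lock "{name}" by "{owner}"')
        wait_start = time.monotonic()
        lock.acquire()
        waited = time.monotonic() - wait_start
        print(f'Lock "{name}" acquired by "{owner}" :: waited {waited:.3f}s')
        held_start = time.monotonic()
        try:
            yield
        finally:
            lock.release()
            held = time.monotonic() - held_start
            print(f'Lock "{name}" "released" by "{owner}" :: held {held:.3f}s')

    # Example mirroring the resize_claim records:
    # with timed_lock("compute_resources", "ResourceTracker.resize_claim"):
    #     ...  # perform the claim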
[ 1051.668999] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4e036ee5-bb70-4754-a5fc-c53eb54712f9 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Acquiring lock "refresh_cache-726332dd-8699-49a4-a9ea-b9cbfc159855" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1051.669364] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4e036ee5-bb70-4754-a5fc-c53eb54712f9 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Acquired lock "refresh_cache-726332dd-8699-49a4-a9ea-b9cbfc159855" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1051.669686] env[63345]: DEBUG nova.network.neutron [None req-4e036ee5-bb70-4754-a5fc-c53eb54712f9 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1051.699791] env[63345]: DEBUG oslo_vmware.api [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Task: {'id': task-1017788, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.748109] env[63345]: DEBUG nova.compute.manager [req-9081f418-dfea-4d9e-894e-ec2c23cbeb68 req-e260798c-4510-4023-95e0-eb1db5fa996d service nova] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Received event network-vif-deleted-d6e5e759-86e1-4f76-9b65-19b2691780df {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1051.748509] env[63345]: INFO nova.compute.manager [req-9081f418-dfea-4d9e-894e-ec2c23cbeb68 req-e260798c-4510-4023-95e0-eb1db5fa996d service nova] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Neutron deleted interface d6e5e759-86e1-4f76-9b65-19b2691780df; detaching it from the instance and deleting it from the info cache [ 1051.748853] env[63345]: DEBUG nova.network.neutron [req-9081f418-dfea-4d9e-894e-ec2c23cbeb68 req-e260798c-4510-4023-95e0-eb1db5fa996d service nova] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1051.768079] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9ae67186-5cb5-4cf4-b13b-93933d98b534 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Acquiring lock "da3408a0-cce7-4252-be47-097f081d83c1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1051.768359] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9ae67186-5cb5-4cf4-b13b-93933d98b534 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Lock "da3408a0-cce7-4252-be47-097f081d83c1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1051.769209] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9ae67186-5cb5-4cf4-b13b-93933d98b534 
tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Acquiring lock "da3408a0-cce7-4252-be47-097f081d83c1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1051.769209] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9ae67186-5cb5-4cf4-b13b-93933d98b534 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Lock "da3408a0-cce7-4252-be47-097f081d83c1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1051.769209] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9ae67186-5cb5-4cf4-b13b-93933d98b534 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Lock "da3408a0-cce7-4252-be47-097f081d83c1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1051.775027] env[63345]: INFO nova.compute.manager [None req-9ae67186-5cb5-4cf4-b13b-93933d98b534 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: da3408a0-cce7-4252-be47-097f081d83c1] Terminating instance [ 1051.956809] env[63345]: DEBUG oslo_vmware.api [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017789, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.021854] env[63345]: DEBUG oslo_concurrency.lockutils [None req-73a5a717-9b1e-41c1-9c56-b077bccf504c tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lock "bce78147-6f6d-47a2-84f3-482f59a8bb8e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.679s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1052.175029] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ab84f909-20c6-45aa-b427-942912ed8787 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.020s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1052.205887] env[63345]: DEBUG oslo_vmware.api [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Task: {'id': task-1017788, 'name': PowerOnVM_Task, 'duration_secs': 0.598916} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.208055] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] [instance: 5cefe8a6-4af0-47d4-84f5-1d579d0c9968] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1052.208055] env[63345]: INFO nova.compute.manager [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] [instance: 5cefe8a6-4af0-47d4-84f5-1d579d0c9968] Took 5.91 seconds to spawn the instance on the hypervisor. [ 1052.208055] env[63345]: DEBUG nova.compute.manager [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] [instance: 5cefe8a6-4af0-47d4-84f5-1d579d0c9968] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1052.208055] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ac57f25-ae81-42a6-a58b-ce3c2bd279db {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.224749] env[63345]: DEBUG nova.network.neutron [-] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1052.252829] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a2a24c32-a80a-4473-8176-bda677bf480c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.270455] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acbe9a8a-26a3-47ff-a277-fb22aa973b5d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.285471] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9ae67186-5cb5-4cf4-b13b-93933d98b534 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Acquiring lock "refresh_cache-da3408a0-cce7-4252-be47-097f081d83c1" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1052.285943] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9ae67186-5cb5-4cf4-b13b-93933d98b534 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Acquired lock "refresh_cache-da3408a0-cce7-4252-be47-097f081d83c1" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1052.286265] env[63345]: DEBUG nova.network.neutron [None req-9ae67186-5cb5-4cf4-b13b-93933d98b534 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: da3408a0-cce7-4252-be47-097f081d83c1] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1052.315558] env[63345]: DEBUG nova.compute.manager [req-9081f418-dfea-4d9e-894e-ec2c23cbeb68 req-e260798c-4510-4023-95e0-eb1db5fa996d service nova] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Detach interface failed, port_id=d6e5e759-86e1-4f76-9b65-19b2691780df, reason: Instance 
7057cdfc-a6d9-4e52-b650-6a5709d5f8c2 could not be found. {{(pid=63345) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 1052.460264] env[63345]: DEBUG oslo_vmware.api [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017789, 'name': PowerOnVM_Task, 'duration_secs': 0.908006} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.460700] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: b3e0831b-b8f1-40c4-be01-71ed6484dbc0] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1052.460856] env[63345]: INFO nova.compute.manager [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: b3e0831b-b8f1-40c4-be01-71ed6484dbc0] Took 8.48 seconds to spawn the instance on the hypervisor. [ 1052.461031] env[63345]: DEBUG nova.compute.manager [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: b3e0831b-b8f1-40c4-be01-71ed6484dbc0] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1052.461840] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be58503d-40ca-49cb-a3ae-3dd076f74949 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.482857] env[63345]: DEBUG nova.network.neutron [None req-4e036ee5-bb70-4754-a5fc-c53eb54712f9 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Updating instance_info_cache with network_info: [{"id": "114e38e0-a558-4242-ad5b-4aac063dcb72", "address": "fa:16:3e:bb:2c:f8", "network": {"id": "dffa0b34-9323-42eb-aeb1-e32aebcb75c8", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1826417035-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.227", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "57e386920081487583ea143003aca8c4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "94e1d797-8eb2-4400-9f7d-f2eb60eb4cf2", "external-id": "nsx-vlan-transportzone-828", "segmentation_id": 828, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap114e38e0-a5", "ovs_interfaceid": "114e38e0-a558-4242-ad5b-4aac063dcb72", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1052.730254] env[63345]: INFO nova.compute.manager [-] [instance: 
7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Took 1.82 seconds to deallocate network for instance. [ 1052.730420] env[63345]: INFO nova.compute.manager [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] [instance: 5cefe8a6-4af0-47d4-84f5-1d579d0c9968] Took 11.88 seconds to build instance. [ 1052.809135] env[63345]: DEBUG nova.network.neutron [None req-9ae67186-5cb5-4cf4-b13b-93933d98b534 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: da3408a0-cce7-4252-be47-097f081d83c1] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1052.863548] env[63345]: DEBUG nova.network.neutron [None req-9ae67186-5cb5-4cf4-b13b-93933d98b534 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: da3408a0-cce7-4252-be47-097f081d83c1] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1052.979682] env[63345]: INFO nova.compute.manager [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: b3e0831b-b8f1-40c4-be01-71ed6484dbc0] Took 13.34 seconds to build instance. [ 1052.986148] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4e036ee5-bb70-4754-a5fc-c53eb54712f9 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Releasing lock "refresh_cache-726332dd-8699-49a4-a9ea-b9cbfc159855" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1053.185534] env[63345]: INFO nova.compute.manager [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] [instance: 5cefe8a6-4af0-47d4-84f5-1d579d0c9968] Rebuilding instance [ 1053.220874] env[63345]: DEBUG nova.compute.manager [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] [instance: 5cefe8a6-4af0-47d4-84f5-1d579d0c9968] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1053.221794] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13edd3ea-b50d-46bc-a268-4d418291b0ae {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.233735] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dcae6e9e-8650-44b1-91f4-9cbf6b91b423 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Lock "5cefe8a6-4af0-47d4-84f5-1d579d0c9968" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.400s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1053.236617] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7c20033e-3753-4fd1-b8f7-3a44f2382dbd tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1053.236938] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7c20033e-3753-4fd1-b8f7-3a44f2382dbd tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1053.237178] env[63345]: DEBUG nova.objects.instance [None req-7c20033e-3753-4fd1-b8f7-3a44f2382dbd tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Lazy-loading 'resources' on Instance uuid 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1053.366509] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9ae67186-5cb5-4cf4-b13b-93933d98b534 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Releasing lock "refresh_cache-da3408a0-cce7-4252-be47-097f081d83c1" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1053.366970] env[63345]: DEBUG nova.compute.manager [None req-9ae67186-5cb5-4cf4-b13b-93933d98b534 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: da3408a0-cce7-4252-be47-097f081d83c1] Start destroying the instance on the hypervisor. {{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 1053.367343] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-9ae67186-5cb5-4cf4-b13b-93933d98b534 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: da3408a0-cce7-4252-be47-097f081d83c1] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1053.368255] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7addde37-464c-4392-8f8e-41aacef9f730 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.377613] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ae67186-5cb5-4cf4-b13b-93933d98b534 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: da3408a0-cce7-4252-be47-097f081d83c1] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1053.377952] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-21940f64-c648-464c-b4fd-d2d112cdfa51 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.386098] env[63345]: DEBUG oslo_vmware.api [None req-9ae67186-5cb5-4cf4-b13b-93933d98b534 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Waiting for the task: (returnval){ [ 1053.386098] env[63345]: value = "task-1017790" [ 1053.386098] env[63345]: _type = "Task" [ 1053.386098] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.396567] env[63345]: DEBUG oslo_vmware.api [None req-9ae67186-5cb5-4cf4-b13b-93933d98b534 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Task: {'id': task-1017790, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.482049] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ed84f6cf-2414-4469-8c19-fca93834aa24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lock "b3e0831b-b8f1-40c4-be01-71ed6484dbc0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.853s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1053.767390] env[63345]: DEBUG nova.compute.manager [None req-6a788111-27eb-4d7f-9dfc-c2ac09935a02 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Stashing vm_state: active {{(pid=63345) _prep_resize /opt/stack/nova/nova/compute/manager.py:5953}} [ 1053.885123] env[63345]: DEBUG nova.compute.manager [req-e08178db-26a2-4a0f-9c08-28fc75e58322 req-34d3597c-0012-4261-b659-533f98be92f6 service nova] [instance: 869f8110-6490-4a47-955a-0ce085f826af] Received event network-changed-9b0555db-b627-44ae-8812-42415d554cde {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1053.885355] env[63345]: DEBUG nova.compute.manager [req-e08178db-26a2-4a0f-9c08-28fc75e58322 req-34d3597c-0012-4261-b659-533f98be92f6 service nova] [instance: 869f8110-6490-4a47-955a-0ce085f826af] Refreshing instance network info cache due to event network-changed-9b0555db-b627-44ae-8812-42415d554cde. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 1053.885589] env[63345]: DEBUG oslo_concurrency.lockutils [req-e08178db-26a2-4a0f-9c08-28fc75e58322 req-34d3597c-0012-4261-b659-533f98be92f6 service nova] Acquiring lock "refresh_cache-869f8110-6490-4a47-955a-0ce085f826af" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1053.885762] env[63345]: DEBUG oslo_concurrency.lockutils [req-e08178db-26a2-4a0f-9c08-28fc75e58322 req-34d3597c-0012-4261-b659-533f98be92f6 service nova] Acquired lock "refresh_cache-869f8110-6490-4a47-955a-0ce085f826af" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1053.885910] env[63345]: DEBUG nova.network.neutron [req-e08178db-26a2-4a0f-9c08-28fc75e58322 req-34d3597c-0012-4261-b659-533f98be92f6 service nova] [instance: 869f8110-6490-4a47-955a-0ce085f826af] Refreshing network info cache for port 9b0555db-b627-44ae-8812-42415d554cde {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1053.901875] env[63345]: DEBUG oslo_vmware.api [None req-9ae67186-5cb5-4cf4-b13b-93933d98b534 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Task: {'id': task-1017790, 'name': PowerOffVM_Task, 'duration_secs': 0.186677} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.902172] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ae67186-5cb5-4cf4-b13b-93933d98b534 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: da3408a0-cce7-4252-be47-097f081d83c1] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1053.902966] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-9ae67186-5cb5-4cf4-b13b-93933d98b534 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: da3408a0-cce7-4252-be47-097f081d83c1] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1053.902966] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4b634614-9ae1-46ca-93ae-2abaf3d909a2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.935321] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-9ae67186-5cb5-4cf4-b13b-93933d98b534 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: da3408a0-cce7-4252-be47-097f081d83c1] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1053.935574] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-9ae67186-5cb5-4cf4-b13b-93933d98b534 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: da3408a0-cce7-4252-be47-097f081d83c1] Deleting contents of the VM from datastore datastore1 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1053.935808] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ae67186-5cb5-4cf4-b13b-93933d98b534 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Deleting the datastore file [datastore1] da3408a0-cce7-4252-be47-097f081d83c1 {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1053.936065] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6aa7e16d-b3cd-40af-bf4d-19fd4db45cd7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.943141] env[63345]: DEBUG oslo_vmware.api [None req-9ae67186-5cb5-4cf4-b13b-93933d98b534 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Waiting for the task: (returnval){ [ 1053.943141] env[63345]: value = "task-1017792" [ 1053.943141] env[63345]: _type = "Task" [ 1053.943141] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.954930] env[63345]: DEBUG oslo_vmware.api [None req-9ae67186-5cb5-4cf4-b13b-93933d98b534 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Task: {'id': task-1017792, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.956501] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc5addfe-eb4b-42fa-9e25-8c3684ebbb87 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.963791] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-447d6091-680f-45d6-8094-0a57a49f279d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.000196] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e69e4ec3-147e-4ace-8f28-fedb07f908db {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.009297] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64d6d564-c756-4438-b7ce-3e687853fd6c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.024571] env[63345]: DEBUG nova.compute.provider_tree [None req-7c20033e-3753-4fd1-b8f7-3a44f2382dbd tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1054.238527] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] [instance: 5cefe8a6-4af0-47d4-84f5-1d579d0c9968] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1054.240590] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1a422cb4-6f5b-4ae6-8938-e7f6569267e8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.243609] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16b2540c-d095-4e57-99e5-ee76d8136109 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.250754] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-46a34212-a645-479b-b69f-2457655b2d9d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: b3e0831b-b8f1-40c4-be01-71ed6484dbc0] Suspending the VM {{(pid=63345) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1163}} [ 1054.252045] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-78d8e251-0609-44d1-baaa-a57d6bf47895 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.253566] env[63345]: DEBUG oslo_vmware.api [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Waiting for the task: (returnval){ [ 1054.253566] env[63345]: value = "task-1017793" [ 1054.253566] env[63345]: _type = "Task" [ 1054.253566] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.258253] env[63345]: DEBUG oslo_vmware.api [None req-46a34212-a645-479b-b69f-2457655b2d9d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for the task: (returnval){ [ 1054.258253] env[63345]: value = "task-1017794" [ 1054.258253] env[63345]: _type = "Task" [ 1054.258253] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.264732] env[63345]: DEBUG oslo_vmware.api [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Task: {'id': task-1017793, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.270650] env[63345]: DEBUG oslo_vmware.api [None req-46a34212-a645-479b-b69f-2457655b2d9d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017794, 'name': SuspendVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.290149] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6a788111-27eb-4d7f-9dfc-c2ac09935a02 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1054.456078] env[63345]: DEBUG oslo_vmware.api [None req-9ae67186-5cb5-4cf4-b13b-93933d98b534 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Task: {'id': task-1017792, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.109821} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.456655] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ae67186-5cb5-4cf4-b13b-93933d98b534 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1054.456655] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-9ae67186-5cb5-4cf4-b13b-93933d98b534 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: da3408a0-cce7-4252-be47-097f081d83c1] Deleted contents of the VM from datastore datastore1 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1054.456833] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-9ae67186-5cb5-4cf4-b13b-93933d98b534 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: da3408a0-cce7-4252-be47-097f081d83c1] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1054.456914] env[63345]: INFO nova.compute.manager [None req-9ae67186-5cb5-4cf4-b13b-93933d98b534 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: da3408a0-cce7-4252-be47-097f081d83c1] Took 1.09 seconds to destroy the instance on the hypervisor. 
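The entries from task-1017790 (PowerOffVM_Task) through task-1017792 (DeleteDatastoreFile_Task) trace the vmwareapi destroy path for instance da3408a0-cce7-4252-be47-097f081d83c1: power off the VM, unregister it from vCenter, delete its directory from the datastore, and only then let the compute manager deallocate the Neutron ports. A minimal ordering sketch follows; invoke and wait_for_task are hypothetical stand-ins for the session calls logged above, and error/retry handling is intentionally omitted.

    def destroy_instance(invoke, wait_for_task, vm_ref, datastore, instance_uuid):
        """Sketch of the power-off -> unregister -> delete-files ordering above.

        `invoke` issues a named vCenter operation and returns a task reference
        where one exists; `wait_for_task` blocks until that task completes.
        Both are hypothetical helpers, not the real driver API.
        """
        # Power off the VM (PowerOffVM_Task in the log).
        wait_for_task(invoke("PowerOffVM_Task", vm_ref))
        # Unregister the VM so vCenter forgets it (UnregisterVM, no task to poll).
        invoke("UnregisterVM", vm_ref)
        # Delete the instance directory from the datastore
        # (FileManager.DeleteDatastoreFile_Task in the log).
        path = "[%s] %s" % (datastore, instance_uuid)
        wait_for_task(invoke("DeleteDatastoreFile_Task", path))
        # Hypervisor-side cleanup is done; network deallocation (the
        # loopingcall / deallocate_for_instance entries below) happens next.

The same ordering repeats later in this section for instance 5cefe8a6-4af0-47d4-84f5-1d579d0c9968 (task-1017793 and task-1017796), where the rebuild path destroys the VM before respawning it.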
[ 1054.457183] env[63345]: DEBUG oslo.service.loopingcall [None req-9ae67186-5cb5-4cf4-b13b-93933d98b534 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1054.457387] env[63345]: DEBUG nova.compute.manager [-] [instance: da3408a0-cce7-4252-be47-097f081d83c1] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 1054.457485] env[63345]: DEBUG nova.network.neutron [-] [instance: da3408a0-cce7-4252-be47-097f081d83c1] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1054.472586] env[63345]: DEBUG nova.network.neutron [-] [instance: da3408a0-cce7-4252-be47-097f081d83c1] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1054.506314] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e2036cd-3303-44b4-8258-4ef78cd44d4c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.529111] env[63345]: DEBUG nova.scheduler.client.report [None req-7c20033e-3753-4fd1-b8f7-3a44f2382dbd tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1054.532978] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-4e036ee5-bb70-4754-a5fc-c53eb54712f9 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Updating instance '726332dd-8699-49a4-a9ea-b9cbfc159855' progress to 0 {{(pid=63345) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1054.608429] env[63345]: DEBUG nova.network.neutron [req-e08178db-26a2-4a0f-9c08-28fc75e58322 req-34d3597c-0012-4261-b659-533f98be92f6 service nova] [instance: 869f8110-6490-4a47-955a-0ce085f826af] Updated VIF entry in instance network info cache for port 9b0555db-b627-44ae-8812-42415d554cde. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1054.608821] env[63345]: DEBUG nova.network.neutron [req-e08178db-26a2-4a0f-9c08-28fc75e58322 req-34d3597c-0012-4261-b659-533f98be92f6 service nova] [instance: 869f8110-6490-4a47-955a-0ce085f826af] Updating instance_info_cache with network_info: [{"id": "9b0555db-b627-44ae-8812-42415d554cde", "address": "fa:16:3e:4c:4f:70", "network": {"id": "18b67684-3f06-4f15-be40-ba0b2769b248", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1680877425-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cb91ecf5d00e48dea9baf2122ac4fed7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "68add7d6-c025-46fa-84d3-9c589adb63e4", "external-id": "nsx-vlan-transportzone-961", "segmentation_id": 961, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9b0555db-b6", "ovs_interfaceid": "9b0555db-b627-44ae-8812-42415d554cde", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1054.769169] env[63345]: DEBUG oslo_vmware.api [None req-46a34212-a645-479b-b69f-2457655b2d9d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017794, 'name': SuspendVM_Task} progress is 62%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.772479] env[63345]: DEBUG oslo_vmware.api [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Task: {'id': task-1017793, 'name': PowerOffVM_Task, 'duration_secs': 0.121005} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.772806] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] [instance: 5cefe8a6-4af0-47d4-84f5-1d579d0c9968] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1054.773210] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] [instance: 5cefe8a6-4af0-47d4-84f5-1d579d0c9968] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1054.774073] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5971abf5-f52c-42a5-97c0-dc76049e04b7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.781903] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] [instance: 5cefe8a6-4af0-47d4-84f5-1d579d0c9968] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1054.782183] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6e594c8e-2550-4bff-8fea-05f62d4f8fa5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.812839] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] [instance: 5cefe8a6-4af0-47d4-84f5-1d579d0c9968] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1054.813154] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] [instance: 5cefe8a6-4af0-47d4-84f5-1d579d0c9968] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1054.813354] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Deleting the datastore file [datastore2] 5cefe8a6-4af0-47d4-84f5-1d579d0c9968 {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1054.813639] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-52f4c38f-7f51-4462-be5a-87d337400542 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.820926] env[63345]: DEBUG oslo_vmware.api [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Waiting for the task: (returnval){ [ 1054.820926] env[63345]: value = "task-1017796" [ 1054.820926] env[63345]: _type = "Task" [ 1054.820926] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.829414] env[63345]: DEBUG oslo_vmware.api [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Task: {'id': task-1017796, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.975054] env[63345]: DEBUG nova.network.neutron [-] [instance: da3408a0-cce7-4252-be47-097f081d83c1] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1055.034070] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7c20033e-3753-4fd1-b8f7-3a44f2382dbd tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.797s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1055.036685] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6a788111-27eb-4d7f-9dfc-c2ac09935a02 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.747s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1055.039867] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e036ee5-bb70-4754-a5fc-c53eb54712f9 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1055.043096] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cd07f736-6e30-41fe-a872-796612ae598a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.051803] env[63345]: DEBUG oslo_vmware.api [None req-4e036ee5-bb70-4754-a5fc-c53eb54712f9 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 1055.051803] env[63345]: value = "task-1017797" [ 1055.051803] env[63345]: _type = "Task" [ 1055.051803] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.063845] env[63345]: INFO nova.scheduler.client.report [None req-7c20033e-3753-4fd1-b8f7-3a44f2382dbd tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Deleted allocations for instance 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2 [ 1055.064923] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e036ee5-bb70-4754-a5fc-c53eb54712f9 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] VM already powered off {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 1055.065139] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-4e036ee5-bb70-4754-a5fc-c53eb54712f9 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Updating instance '726332dd-8699-49a4-a9ea-b9cbfc159855' progress to 17 {{(pid=63345) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1055.111622] env[63345]: DEBUG oslo_concurrency.lockutils [req-e08178db-26a2-4a0f-9c08-28fc75e58322 req-34d3597c-0012-4261-b659-533f98be92f6 service nova] Releasing lock "refresh_cache-869f8110-6490-4a47-955a-0ce085f826af" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1055.111904] env[63345]: DEBUG nova.compute.manager [req-e08178db-26a2-4a0f-9c08-28fc75e58322 req-34d3597c-0012-4261-b659-533f98be92f6 service nova] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Received event network-changed-4829f314-cace-49cc-b77a-016ee4b1c7e6 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1055.112104] env[63345]: DEBUG nova.compute.manager [req-e08178db-26a2-4a0f-9c08-28fc75e58322 req-34d3597c-0012-4261-b659-533f98be92f6 service nova] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Refreshing instance network info cache due to event network-changed-4829f314-cace-49cc-b77a-016ee4b1c7e6. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 1055.112531] env[63345]: DEBUG oslo_concurrency.lockutils [req-e08178db-26a2-4a0f-9c08-28fc75e58322 req-34d3597c-0012-4261-b659-533f98be92f6 service nova] Acquiring lock "refresh_cache-bce78147-6f6d-47a2-84f3-482f59a8bb8e" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1055.112531] env[63345]: DEBUG oslo_concurrency.lockutils [req-e08178db-26a2-4a0f-9c08-28fc75e58322 req-34d3597c-0012-4261-b659-533f98be92f6 service nova] Acquired lock "refresh_cache-bce78147-6f6d-47a2-84f3-482f59a8bb8e" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1055.112661] env[63345]: DEBUG nova.network.neutron [req-e08178db-26a2-4a0f-9c08-28fc75e58322 req-34d3597c-0012-4261-b659-533f98be92f6 service nova] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Refreshing network info cache for port 4829f314-cace-49cc-b77a-016ee4b1c7e6 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1055.269103] env[63345]: DEBUG oslo_vmware.api [None req-46a34212-a645-479b-b69f-2457655b2d9d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017794, 'name': SuspendVM_Task} progress is 62%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.329830] env[63345]: DEBUG oslo_vmware.api [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Task: {'id': task-1017796, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.138038} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.330126] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1055.330343] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] [instance: 5cefe8a6-4af0-47d4-84f5-1d579d0c9968] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1055.330530] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] [instance: 5cefe8a6-4af0-47d4-84f5-1d579d0c9968] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1055.477889] env[63345]: INFO nova.compute.manager [-] [instance: da3408a0-cce7-4252-be47-097f081d83c1] Took 1.02 seconds to deallocate network for instance. [ 1055.546095] env[63345]: INFO nova.compute.claims [None req-6a788111-27eb-4d7f-9dfc-c2ac09935a02 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1055.574436] env[63345]: DEBUG nova.virt.hardware [None req-4e036ee5-bb70-4754-a5fc-c53eb54712f9 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1055.574687] env[63345]: DEBUG nova.virt.hardware [None req-4e036ee5-bb70-4754-a5fc-c53eb54712f9 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1055.574839] env[63345]: DEBUG nova.virt.hardware [None req-4e036ee5-bb70-4754-a5fc-c53eb54712f9 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1055.575103] 
env[63345]: DEBUG nova.virt.hardware [None req-4e036ee5-bb70-4754-a5fc-c53eb54712f9 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1055.575279] env[63345]: DEBUG nova.virt.hardware [None req-4e036ee5-bb70-4754-a5fc-c53eb54712f9 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1055.575436] env[63345]: DEBUG nova.virt.hardware [None req-4e036ee5-bb70-4754-a5fc-c53eb54712f9 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1055.575682] env[63345]: DEBUG nova.virt.hardware [None req-4e036ee5-bb70-4754-a5fc-c53eb54712f9 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1055.575810] env[63345]: DEBUG nova.virt.hardware [None req-4e036ee5-bb70-4754-a5fc-c53eb54712f9 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1055.575993] env[63345]: DEBUG nova.virt.hardware [None req-4e036ee5-bb70-4754-a5fc-c53eb54712f9 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1055.576174] env[63345]: DEBUG nova.virt.hardware [None req-4e036ee5-bb70-4754-a5fc-c53eb54712f9 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1055.576356] env[63345]: DEBUG nova.virt.hardware [None req-4e036ee5-bb70-4754-a5fc-c53eb54712f9 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1055.582198] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7c20033e-3753-4fd1-b8f7-3a44f2382dbd tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Lock "7057cdfc-a6d9-4e52-b650-6a5709d5f8c2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.311s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1055.583328] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5e30290b-8269-457b-a4b5-1c0e30c084b6 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.602880] env[63345]: DEBUG oslo_vmware.api [None 
req-4e036ee5-bb70-4754-a5fc-c53eb54712f9 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 1055.602880] env[63345]: value = "task-1017798" [ 1055.602880] env[63345]: _type = "Task" [ 1055.602880] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.613220] env[63345]: DEBUG oslo_vmware.api [None req-4e036ee5-bb70-4754-a5fc-c53eb54712f9 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017798, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.769595] env[63345]: DEBUG oslo_vmware.api [None req-46a34212-a645-479b-b69f-2457655b2d9d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017794, 'name': SuspendVM_Task, 'duration_secs': 1.209103} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.769891] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-46a34212-a645-479b-b69f-2457655b2d9d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: b3e0831b-b8f1-40c4-be01-71ed6484dbc0] Suspended the VM {{(pid=63345) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1167}} [ 1055.770115] env[63345]: DEBUG nova.compute.manager [None req-46a34212-a645-479b-b69f-2457655b2d9d tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: b3e0831b-b8f1-40c4-be01-71ed6484dbc0] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1055.770917] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f37a3f9e-20dc-4c4c-b134-d13a93b94894 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.822343] env[63345]: DEBUG nova.network.neutron [req-e08178db-26a2-4a0f-9c08-28fc75e58322 req-34d3597c-0012-4261-b659-533f98be92f6 service nova] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Updated VIF entry in instance network info cache for port 4829f314-cace-49cc-b77a-016ee4b1c7e6. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1055.822724] env[63345]: DEBUG nova.network.neutron [req-e08178db-26a2-4a0f-9c08-28fc75e58322 req-34d3597c-0012-4261-b659-533f98be92f6 service nova] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Updating instance_info_cache with network_info: [{"id": "4829f314-cace-49cc-b77a-016ee4b1c7e6", "address": "fa:16:3e:27:64:94", "network": {"id": "18b67684-3f06-4f15-be40-ba0b2769b248", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1680877425-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.200", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cb91ecf5d00e48dea9baf2122ac4fed7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "68add7d6-c025-46fa-84d3-9c589adb63e4", "external-id": "nsx-vlan-transportzone-961", "segmentation_id": 961, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4829f314-ca", "ovs_interfaceid": "4829f314-cace-49cc-b77a-016ee4b1c7e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1055.984370] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9ae67186-5cb5-4cf4-b13b-93933d98b534 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1056.028635] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fb09a56d-023a-4ce7-b2d5-445863df611c tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquiring lock "dd624e54-bd5b-4660-88a1-9d6f36560421" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1056.028984] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fb09a56d-023a-4ce7-b2d5-445863df611c tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Lock "dd624e54-bd5b-4660-88a1-9d6f36560421" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1056.029264] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fb09a56d-023a-4ce7-b2d5-445863df611c tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquiring lock "dd624e54-bd5b-4660-88a1-9d6f36560421-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1056.029496] env[63345]: DEBUG oslo_concurrency.lockutils [None 
req-fb09a56d-023a-4ce7-b2d5-445863df611c tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Lock "dd624e54-bd5b-4660-88a1-9d6f36560421-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1056.029748] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fb09a56d-023a-4ce7-b2d5-445863df611c tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Lock "dd624e54-bd5b-4660-88a1-9d6f36560421-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1056.031957] env[63345]: INFO nova.compute.manager [None req-fb09a56d-023a-4ce7-b2d5-445863df611c tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Terminating instance [ 1056.053239] env[63345]: INFO nova.compute.resource_tracker [None req-6a788111-27eb-4d7f-9dfc-c2ac09935a02 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Updating resource usage from migration 0057592c-761e-46cb-9854-74b439bd8605 [ 1056.114759] env[63345]: DEBUG oslo_vmware.api [None req-4e036ee5-bb70-4754-a5fc-c53eb54712f9 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017798, 'name': ReconfigVM_Task, 'duration_secs': 0.38188} completed successfully. 
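The ReconfigVM_Task just completed (task-1017798, polled from 5% to done) is the standard oslo.vmware pattern: invoke a vSphere method that returns a task moref, then block on wait_for_task, whose _poll_task loop produces the "progress is N%" lines in this log. A minimal sketch of that pattern, assuming a reachable vCenter and a vm_ref/reconfig_spec obtained elsewhere; the endpoint, credentials and retry/poll values are placeholders:

from oslo_vmware import api

# Assumed endpoint and credentials; replace with real values.
session = api.VMwareAPISession(
    'vc.example.org', 'user', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

def reconfigure_vm(session, vm_ref, reconfig_spec):
    # Kick off the asynchronous ReconfigVM_Task on the VM managed object ...
    task = session.invoke_api(session.vim, 'ReconfigVM_Task',
                              vm_ref, spec=reconfig_spec)
    # ... then block until vCenter reports success or failure; this is the
    # wait_for_task/_poll_task loop visible throughout this log.
    return session.wait_for_task(task)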
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.115102] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-4e036ee5-bb70-4754-a5fc-c53eb54712f9 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Updating instance '726332dd-8699-49a4-a9ea-b9cbfc159855' progress to 33 {{(pid=63345) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1056.231973] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f50edf50-94e4-4174-bb55-38c1990f19eb {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.241831] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24b44099-afc5-488d-b7b4-7156c64c1f09 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.275324] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2c727db-119d-47cd-a8e2-f41261d9e3d0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.288384] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2fd69a1-eadd-4452-a094-0b50067588a1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.303619] env[63345]: DEBUG nova.compute.provider_tree [None req-6a788111-27eb-4d7f-9dfc-c2ac09935a02 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1056.325395] env[63345]: DEBUG oslo_concurrency.lockutils [req-e08178db-26a2-4a0f-9c08-28fc75e58322 req-34d3597c-0012-4261-b659-533f98be92f6 service nova] Releasing lock "refresh_cache-bce78147-6f6d-47a2-84f3-482f59a8bb8e" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1056.363945] env[63345]: DEBUG nova.virt.hardware [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1056.364253] env[63345]: DEBUG nova.virt.hardware [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 
tempest-ServerShowV257Test-942916775-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1056.364431] env[63345]: DEBUG nova.virt.hardware [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1056.364672] env[63345]: DEBUG nova.virt.hardware [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1056.364855] env[63345]: DEBUG nova.virt.hardware [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1056.365026] env[63345]: DEBUG nova.virt.hardware [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1056.365245] env[63345]: DEBUG nova.virt.hardware [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1056.365413] env[63345]: DEBUG nova.virt.hardware [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1056.365590] env[63345]: DEBUG nova.virt.hardware [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1056.365761] env[63345]: DEBUG nova.virt.hardware [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1056.365944] env[63345]: DEBUG nova.virt.hardware [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1056.366814] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9285f51c-e889-4c22-af96-25091815b363 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.375632] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-ea9df63a-dd5b-48f6-9e6b-528687900608 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.389202] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] [instance: 5cefe8a6-4af0-47d4-84f5-1d579d0c9968] Instance VIF info [] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1056.394799] env[63345]: DEBUG oslo.service.loopingcall [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1056.395056] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5cefe8a6-4af0-47d4-84f5-1d579d0c9968] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1056.395272] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b0fda27b-1d2c-4373-9a71-31f32b7e2c7c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.415367] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1056.415367] env[63345]: value = "task-1017799" [ 1056.415367] env[63345]: _type = "Task" [ 1056.415367] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.425046] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017799, 'name': CreateVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.538788] env[63345]: DEBUG nova.compute.manager [None req-fb09a56d-023a-4ce7-b2d5-445863df611c tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Start destroying the instance on the hypervisor. 
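For the CreateVM_Task started for instance 5cefe8a6 a few entries above (task-1017799), vm_util assembles a VirtualMachineConfigSpec through the SOAP client factory and submits it to the datacenter's VM folder. The sketch below shows the rough call shape rather than the exact Nova helper; the folder, resource-pool and datastore references are assumed to be looked up beforehand, and the guest/CPU/memory values mirror the m1.nano flavor in the log:

def create_vm(session, name, vm_folder_ref, res_pool_ref, datastore_name):
    # Build a minimal VirtualMachineConfigSpec via the suds client factory.
    cf = session.vim.client.factory
    config = cf.create('ns0:VirtualMachineConfigSpec')
    config.name = name
    config.guestId = 'otherGuest'
    config.numCPUs = 1
    config.memoryMB = 192
    files = cf.create('ns0:VirtualMachineFileInfo')
    files.vmPathName = '[%s]' % datastore_name   # let vCenter pick the VM directory
    config.files = files

    # Folder.CreateVM_Task returns a task moref; wait_for_task then polls it,
    # which is the "CreateVM_Task progress is 0%" loop seen above.
    task = session.invoke_api(session.vim, 'CreateVM_Task', vm_folder_ref,
                              config=config, pool=res_pool_ref)
    return session.wait_for_task(task)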
{{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 1056.539022] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-fb09a56d-023a-4ce7-b2d5-445863df611c tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1056.539931] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11277277-5635-4d9d-a93a-60c0886a819d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.546896] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb09a56d-023a-4ce7-b2d5-445863df611c tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1056.547225] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c70e32a5-e7ba-47fb-9b97-a7090a81a779 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.555467] env[63345]: DEBUG oslo_vmware.api [None req-fb09a56d-023a-4ce7-b2d5-445863df611c tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Waiting for the task: (returnval){ [ 1056.555467] env[63345]: value = "task-1017800" [ 1056.555467] env[63345]: _type = "Task" [ 1056.555467] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.563879] env[63345]: DEBUG oslo_vmware.api [None req-fb09a56d-023a-4ce7-b2d5-445863df611c tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017800, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.623203] env[63345]: DEBUG nova.virt.hardware [None req-4e036ee5-bb70-4754-a5fc-c53eb54712f9 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1056.623477] env[63345]: DEBUG nova.virt.hardware [None req-4e036ee5-bb70-4754-a5fc-c53eb54712f9 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1056.623646] env[63345]: DEBUG nova.virt.hardware [None req-4e036ee5-bb70-4754-a5fc-c53eb54712f9 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1056.623872] env[63345]: DEBUG nova.virt.hardware [None req-4e036ee5-bb70-4754-a5fc-c53eb54712f9 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1056.624057] env[63345]: DEBUG nova.virt.hardware [None req-4e036ee5-bb70-4754-a5fc-c53eb54712f9 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1056.624223] env[63345]: DEBUG nova.virt.hardware [None req-4e036ee5-bb70-4754-a5fc-c53eb54712f9 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1056.624441] env[63345]: DEBUG nova.virt.hardware [None req-4e036ee5-bb70-4754-a5fc-c53eb54712f9 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1056.624609] env[63345]: DEBUG nova.virt.hardware [None req-4e036ee5-bb70-4754-a5fc-c53eb54712f9 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1056.624788] env[63345]: DEBUG nova.virt.hardware [None req-4e036ee5-bb70-4754-a5fc-c53eb54712f9 tempest-ServerActionsTestOtherB-1518567629 
tempest-ServerActionsTestOtherB-1518567629-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1056.624963] env[63345]: DEBUG nova.virt.hardware [None req-4e036ee5-bb70-4754-a5fc-c53eb54712f9 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1056.625177] env[63345]: DEBUG nova.virt.hardware [None req-4e036ee5-bb70-4754-a5fc-c53eb54712f9 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1056.630469] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-4e036ee5-bb70-4754-a5fc-c53eb54712f9 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Reconfiguring VM instance instance-00000059 to detach disk 2000 {{(pid=63345) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1056.630798] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cbd654a8-ef02-4637-aa5d-74c1cf0acef8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.651441] env[63345]: DEBUG oslo_vmware.api [None req-4e036ee5-bb70-4754-a5fc-c53eb54712f9 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 1056.651441] env[63345]: value = "task-1017801" [ 1056.651441] env[63345]: _type = "Task" [ 1056.651441] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.663058] env[63345]: DEBUG oslo_vmware.api [None req-4e036ee5-bb70-4754-a5fc-c53eb54712f9 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017801, 'name': ReconfigVM_Task} progress is 5%. 
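The "Flavor limits 0:0:0 ... Got 1 possible topologies ... Sorted desired topologies" walk-through above is Nova enumerating (sockets, cores, threads) factorizations of the flavor's vCPU count within the 65536 per-dimension limits; with vcpus=1 and no flavor or image preferences the only candidate is 1:1:1. A simplified stand-in for that enumeration (not the real nova.virt.hardware code, which also honours image and flavor preferences when sorting):

from collections import namedtuple

VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    # Enumerate every (sockets, cores, threads) triple whose product equals the
    # requested vCPU count while staying inside the per-dimension maxima.
    found = []
    for s in range(1, min(vcpus, max_sockets) + 1):
        for c in range(1, min(vcpus, max_cores) + 1):
            for t in range(1, min(vcpus, max_threads) + 1):
                if s * c * t == vcpus:
                    found.append(VirtCPUTopology(s, c, t))
    return found

print(possible_topologies(1))   # [VirtCPUTopology(sockets=1, cores=1, threads=1)]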
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.783445] env[63345]: DEBUG oslo_concurrency.lockutils [None req-500756a4-c220-4c65-b483-0c39e8abc952 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquiring lock "b3e0831b-b8f1-40c4-be01-71ed6484dbc0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1056.783801] env[63345]: DEBUG oslo_concurrency.lockutils [None req-500756a4-c220-4c65-b483-0c39e8abc952 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lock "b3e0831b-b8f1-40c4-be01-71ed6484dbc0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1056.784070] env[63345]: DEBUG oslo_concurrency.lockutils [None req-500756a4-c220-4c65-b483-0c39e8abc952 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquiring lock "b3e0831b-b8f1-40c4-be01-71ed6484dbc0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1056.784361] env[63345]: DEBUG oslo_concurrency.lockutils [None req-500756a4-c220-4c65-b483-0c39e8abc952 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lock "b3e0831b-b8f1-40c4-be01-71ed6484dbc0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1056.784487] env[63345]: DEBUG oslo_concurrency.lockutils [None req-500756a4-c220-4c65-b483-0c39e8abc952 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lock "b3e0831b-b8f1-40c4-be01-71ed6484dbc0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1056.786910] env[63345]: INFO nova.compute.manager [None req-500756a4-c220-4c65-b483-0c39e8abc952 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: b3e0831b-b8f1-40c4-be01-71ed6484dbc0] Terminating instance [ 1056.806547] env[63345]: DEBUG nova.scheduler.client.report [None req-6a788111-27eb-4d7f-9dfc-c2ac09935a02 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1056.928920] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017799, 'name': CreateVM_Task, 'duration_secs': 0.312631} 
completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.929198] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5cefe8a6-4af0-47d4-84f5-1d579d0c9968] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1056.929582] env[63345]: DEBUG oslo_concurrency.lockutils [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1056.929755] env[63345]: DEBUG oslo_concurrency.lockutils [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1056.930140] env[63345]: DEBUG oslo_concurrency.lockutils [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1056.930431] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-11c4035a-901c-4eaa-8a6f-d0ac22069095 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.936031] env[63345]: DEBUG oslo_vmware.api [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Waiting for the task: (returnval){ [ 1056.936031] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52e35bd9-fde7-fa71-4121-b36c0769e1d2" [ 1056.936031] env[63345]: _type = "Task" [ 1056.936031] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.944827] env[63345]: DEBUG oslo_vmware.api [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52e35bd9-fde7-fa71-4121-b36c0769e1d2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.067060] env[63345]: DEBUG oslo_vmware.api [None req-fb09a56d-023a-4ce7-b2d5-445863df611c tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017800, 'name': PowerOffVM_Task, 'duration_secs': 0.232026} completed successfully. 
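The 'Acquiring lock "[datastore2] devstack-image-cache_base" ... Acquired external semaphore' pairs above come from oslo.concurrency's lockutils, which Nova uses to serialize work on the shared image cache across greenthreads and, with external=True, across processes via a file lock. A minimal sketch of that pattern; the lock name mirrors the one in the log, while the lock_path and the body of the critical section are placeholders:

from oslo_concurrency import lockutils

@lockutils.synchronized('[datastore2] devstack-image-cache_base',
                        external=True, lock_path='/tmp/nova-locks')
def refresh_cached_image():
    # Only one worker at a time may check or populate the cached VMDK; the
    # "Acquiring lock ... / Lock ... released" lines bracket exactly this
    # kind of critical section.
    pass

# The same named lock can also be taken imperatively:
with lockutils.lock('[datastore2] devstack-image-cache_base',
                    external=True, lock_path='/tmp/nova-locks'):
    refreshed = True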
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.067364] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb09a56d-023a-4ce7-b2d5-445863df611c tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1057.067588] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-fb09a56d-023a-4ce7-b2d5-445863df611c tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1057.067913] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ed1ecf6f-b244-4b4c-aba5-7df7001d2b07 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.160186] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-fb09a56d-023a-4ce7-b2d5-445863df611c tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1057.160424] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-fb09a56d-023a-4ce7-b2d5-445863df611c tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1057.160613] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-fb09a56d-023a-4ce7-b2d5-445863df611c tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Deleting the datastore file [datastore2] dd624e54-bd5b-4660-88a1-9d6f36560421 {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1057.164038] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-192d45ee-2aa6-4868-8b04-2cfb4c86e216 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.166055] env[63345]: DEBUG oslo_vmware.api [None req-4e036ee5-bb70-4754-a5fc-c53eb54712f9 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017801, 'name': ReconfigVM_Task, 'duration_secs': 0.206676} completed successfully. 
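The teardown of instance dd624e54 above follows the driver's fixed order: PowerOffVM_Task (task-1017800), then UnregisterVM (synchronous, no task to poll), then FileManager.DeleteDatastoreFile_Task for the instance directory. Sketched with the same oslo.vmware session helpers; vm_ref, dc_ref and the datastore path are assumed inputs and error handling is omitted:

def destroy_vm(session, vm_ref, dc_ref, instance_dir):
    # instance_dir is a datastore path such as '[datastore2] dd624e54-bd5b-4660-88a1-9d6f36560421'.
    # 1. Power the VM off and wait for the task.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)

    # 2. Unregister the VM from vCenter; this call returns no task.
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

    # 3. Delete the instance directory from the datastore (task-1017803 above).
    file_mgr = session.vim.service_content.fileManager
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task', file_mgr,
                              name=instance_dir, datacenter=dc_ref)
    session.wait_for_task(task)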
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.166354] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-4e036ee5-bb70-4754-a5fc-c53eb54712f9 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Reconfigured VM instance instance-00000059 to detach disk 2000 {{(pid=63345) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1057.167606] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fdca213-66f7-4969-a81d-98b132a27df9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.172124] env[63345]: DEBUG oslo_vmware.api [None req-fb09a56d-023a-4ce7-b2d5-445863df611c tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Waiting for the task: (returnval){ [ 1057.172124] env[63345]: value = "task-1017803" [ 1057.172124] env[63345]: _type = "Task" [ 1057.172124] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.195245] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-4e036ee5-bb70-4754-a5fc-c53eb54712f9 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Reconfiguring VM instance instance-00000059 to attach disk [datastore2] 726332dd-8699-49a4-a9ea-b9cbfc159855/726332dd-8699-49a4-a9ea-b9cbfc159855.vmdk or device None with type thin {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1057.196163] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f212ba7d-d854-4f68-888e-a34206c67ff6 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.213447] env[63345]: DEBUG oslo_vmware.api [None req-fb09a56d-023a-4ce7-b2d5-445863df611c tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017803, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.221218] env[63345]: DEBUG oslo_vmware.api [None req-4e036ee5-bb70-4754-a5fc-c53eb54712f9 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 1057.221218] env[63345]: value = "task-1017804" [ 1057.221218] env[63345]: _type = "Task" [ 1057.221218] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.235827] env[63345]: DEBUG oslo_vmware.api [None req-4e036ee5-bb70-4754-a5fc-c53eb54712f9 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017804, 'name': ReconfigVM_Task} progress is 6%. 
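"Reconfiguring VM instance ... to attach disk ... with type thin" above is another ReconfigVM_Task, this time carrying a VirtualDeviceConfigSpec that adds a VirtualDisk whose backing is thin-provisioned. A rough sketch of how such a spec is assembled through the client factory; the controller key, unit number and device key here are placeholders that would normally come from the VM's existing hardware, so treat this as shape, not a drop-in:

def attach_vmdk_spec(client_factory, vmdk_path, controller_key=1000, unit_number=0):
    backing = client_factory.create('ns0:VirtualDiskFlatVer2BackingInfo')
    backing.diskMode = 'persistent'
    backing.thinProvisioned = True          # "type thin" in the log
    backing.fileName = vmdk_path            # e.g. '[datastore2] 726332dd-.../726332dd-....vmdk'

    disk = client_factory.create('ns0:VirtualDisk')
    disk.backing = backing
    disk.controllerKey = controller_key
    disk.unitNumber = unit_number
    disk.key = -100                         # negative key marks a new device entry

    dev_change = client_factory.create('ns0:VirtualDeviceConfigSpec')
    dev_change.operation = 'add'
    dev_change.device = disk

    spec = client_factory.create('ns0:VirtualMachineConfigSpec')
    spec.deviceChange = [dev_change]
    return spec                             # hand this to ReconfigVM_Task as shown earlier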
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.291891] env[63345]: DEBUG nova.compute.manager [None req-500756a4-c220-4c65-b483-0c39e8abc952 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: b3e0831b-b8f1-40c4-be01-71ed6484dbc0] Start destroying the instance on the hypervisor. {{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 1057.292224] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-500756a4-c220-4c65-b483-0c39e8abc952 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: b3e0831b-b8f1-40c4-be01-71ed6484dbc0] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1057.293258] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cc3e7e2-3055-409e-91b5-5102c070e563 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.301690] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-500756a4-c220-4c65-b483-0c39e8abc952 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: b3e0831b-b8f1-40c4-be01-71ed6484dbc0] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1057.302029] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-edd4e9fe-eca2-4311-8f2b-3d10a7ed8666 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.311916] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6a788111-27eb-4d7f-9dfc-c2ac09935a02 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.275s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1057.312181] env[63345]: INFO nova.compute.manager [None req-6a788111-27eb-4d7f-9dfc-c2ac09935a02 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Migrating [ 1057.319297] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9ae67186-5cb5-4cf4-b13b-93933d98b534 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.335s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1057.319532] env[63345]: DEBUG nova.objects.instance [None req-9ae67186-5cb5-4cf4-b13b-93933d98b534 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Lazy-loading 'resources' on Instance uuid da3408a0-cce7-4252-be47-097f081d83c1 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1057.402640] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-500756a4-c220-4c65-b483-0c39e8abc952 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: b3e0831b-b8f1-40c4-be01-71ed6484dbc0] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1057.402983] env[63345]: DEBUG 
nova.virt.vmwareapi.vmops [None req-500756a4-c220-4c65-b483-0c39e8abc952 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: b3e0831b-b8f1-40c4-be01-71ed6484dbc0] Deleting contents of the VM from datastore datastore1 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1057.403234] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-500756a4-c220-4c65-b483-0c39e8abc952 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Deleting the datastore file [datastore1] b3e0831b-b8f1-40c4-be01-71ed6484dbc0 {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1057.403536] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e8ead96d-5358-4361-82af-a0d548594eb5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.412727] env[63345]: DEBUG oslo_vmware.api [None req-500756a4-c220-4c65-b483-0c39e8abc952 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for the task: (returnval){ [ 1057.412727] env[63345]: value = "task-1017806" [ 1057.412727] env[63345]: _type = "Task" [ 1057.412727] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.425302] env[63345]: DEBUG oslo_vmware.api [None req-500756a4-c220-4c65-b483-0c39e8abc952 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017806, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.450104] env[63345]: DEBUG oslo_vmware.api [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52e35bd9-fde7-fa71-4121-b36c0769e1d2, 'name': SearchDatastore_Task, 'duration_secs': 0.01075} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.450528] env[63345]: DEBUG oslo_concurrency.lockutils [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1057.450815] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] [instance: 5cefe8a6-4af0-47d4-84f5-1d579d0c9968] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1057.451067] env[63345]: DEBUG oslo_concurrency.lockutils [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1057.451231] env[63345]: DEBUG oslo_concurrency.lockutils [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1057.451415] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1057.454329] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-34708830-3744-4576-99eb-b5cc82337d83 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.465759] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1057.465960] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Folder [datastore2] devstack-image-cache_base created. 
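The "Processing image 2ff49e1b ..." sequence above, together with the CopyVirtualDisk_Task that follows later for instance 5cefe8a6, boils down to: look for the cached VMDK on the datastore and, once it is there, copy it into the instance directory via VirtualDiskManager.CopyVirtualDisk_Task. A condensed sketch using the same session helper; the datastore-browser search step is left out and the two paths are assumed inputs:

def clone_cached_image(session, dc_ref, cached_vmdk, instance_vmdk):
    # cached_vmdk   e.g. '[datastore2] devstack-image-cache_base/2ff49e1b-.../2ff49e1b-....vmdk'
    # instance_vmdk e.g. '[datastore2] 5cefe8a6-.../5cefe8a6-....vmdk'
    disk_mgr = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', disk_mgr,
                              sourceName=cached_vmdk, sourceDatacenter=dc_ref,
                              destName=instance_vmdk, destDatacenter=dc_ref)
    # Same polling loop as every other *_Task in this log.
    session.wait_for_task(task)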
{{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1057.466929] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-15a760d8-16f4-4774-9784-abb13cb8e51a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.479505] env[63345]: DEBUG oslo_vmware.api [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Waiting for the task: (returnval){ [ 1057.479505] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52fcdf5b-0361-6cf4-7e3a-f76e18a2c6d9" [ 1057.479505] env[63345]: _type = "Task" [ 1057.479505] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.489073] env[63345]: DEBUG oslo_vmware.api [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52fcdf5b-0361-6cf4-7e3a-f76e18a2c6d9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.554362] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f73ced0-6b7b-4454-8885-7860892db568 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.563578] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf58492e-e788-4e9f-9286-0f5592e6e88c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.595993] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8008f53b-1066-4c5d-b6fa-4639270d4ae6 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.605451] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40608598-f122-44db-a36f-75da12018dc9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.623652] env[63345]: DEBUG nova.compute.provider_tree [None req-9ae67186-5cb5-4cf4-b13b-93933d98b534 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1057.683263] env[63345]: DEBUG oslo_vmware.api [None req-fb09a56d-023a-4ce7-b2d5-445863df611c tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Task: {'id': task-1017803, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.108781} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.683536] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-fb09a56d-023a-4ce7-b2d5-445863df611c tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1057.683730] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-fb09a56d-023a-4ce7-b2d5-445863df611c tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1057.683920] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-fb09a56d-023a-4ce7-b2d5-445863df611c tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1057.684119] env[63345]: INFO nova.compute.manager [None req-fb09a56d-023a-4ce7-b2d5-445863df611c tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1057.684374] env[63345]: DEBUG oslo.service.loopingcall [None req-fb09a56d-023a-4ce7-b2d5-445863df611c tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1057.684584] env[63345]: DEBUG nova.compute.manager [-] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 1057.684681] env[63345]: DEBUG nova.network.neutron [-] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1057.732024] env[63345]: DEBUG oslo_vmware.api [None req-4e036ee5-bb70-4754-a5fc-c53eb54712f9 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017804, 'name': ReconfigVM_Task, 'duration_secs': 0.302994} completed successfully. 
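deallocate_for_instance() above ends with Neutron deleting the instance's ports, which is why the network-vif-deleted external events show up a few entries later. In raw API terms the cleanup is roughly the following; this is an illustrative use of python-neutronclient, not Nova's own network API layer, and authentication is assumed to exist elsewhere:

from neutronclient.v2_0 import client as neutron_client

def deallocate_ports(neutron, instance_uuid):
    # Find every port bound to this instance and delete it; Neutron then emits
    # the network-vif-deleted notifications Nova receives as external events.
    ports = neutron.list_ports(device_id=instance_uuid)['ports']
    for port in ports:
        neutron.delete_port(port['id'])

# neutron = neutron_client.Client(session=keystone_session)  # auth/session assumed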
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.732380] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-4e036ee5-bb70-4754-a5fc-c53eb54712f9 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Reconfigured VM instance instance-00000059 to attach disk [datastore2] 726332dd-8699-49a4-a9ea-b9cbfc159855/726332dd-8699-49a4-a9ea-b9cbfc159855.vmdk or device None with type thin {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1057.732625] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-4e036ee5-bb70-4754-a5fc-c53eb54712f9 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Updating instance '726332dd-8699-49a4-a9ea-b9cbfc159855' progress to 50 {{(pid=63345) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1057.828565] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6a788111-27eb-4d7f-9dfc-c2ac09935a02 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquiring lock "refresh_cache-bce78147-6f6d-47a2-84f3-482f59a8bb8e" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1057.828814] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6a788111-27eb-4d7f-9dfc-c2ac09935a02 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquired lock "refresh_cache-bce78147-6f6d-47a2-84f3-482f59a8bb8e" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1057.829016] env[63345]: DEBUG nova.network.neutron [None req-6a788111-27eb-4d7f-9dfc-c2ac09935a02 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1057.928214] env[63345]: DEBUG oslo_vmware.api [None req-500756a4-c220-4c65-b483-0c39e8abc952 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017806, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.331551} completed successfully. 
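The "progress to 33 ... 50 ... 67" updates for instance 726332dd track the resize steps in vmops: progress is simply the completed-step count over the total number of steps, rounded to a percentage. With six total steps (inferred from the logged values, not read from the code), steps 2, 3 and 4 give exactly the 33, 50 and 67 seen here:

def migrate_progress(step, total_steps=6):
    # 2/6 -> 33, 3/6 -> 50, 4/6 -> 67, matching the log above.
    return int(round(step * 100.0 / total_steps))

print([migrate_progress(s) for s in (2, 3, 4)])   # [33, 50, 67]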
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.928474] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-500756a4-c220-4c65-b483-0c39e8abc952 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1057.928672] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-500756a4-c220-4c65-b483-0c39e8abc952 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: b3e0831b-b8f1-40c4-be01-71ed6484dbc0] Deleted contents of the VM from datastore datastore1 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1057.928858] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-500756a4-c220-4c65-b483-0c39e8abc952 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: b3e0831b-b8f1-40c4-be01-71ed6484dbc0] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1057.929110] env[63345]: INFO nova.compute.manager [None req-500756a4-c220-4c65-b483-0c39e8abc952 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: b3e0831b-b8f1-40c4-be01-71ed6484dbc0] Took 0.64 seconds to destroy the instance on the hypervisor. [ 1057.929400] env[63345]: DEBUG oslo.service.loopingcall [None req-500756a4-c220-4c65-b483-0c39e8abc952 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1057.929541] env[63345]: DEBUG nova.compute.manager [-] [instance: b3e0831b-b8f1-40c4-be01-71ed6484dbc0] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 1057.929636] env[63345]: DEBUG nova.network.neutron [-] [instance: b3e0831b-b8f1-40c4-be01-71ed6484dbc0] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1057.986529] env[63345]: DEBUG nova.compute.manager [req-12f1d9a4-b0d4-4945-b805-8597df733d2a req-54b6b3e4-25d4-4ae7-ab9b-8c6231d06c02 service nova] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Received event network-vif-deleted-8a3e5f64-f812-4c1b-a9e0-b8b3146a1467 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1057.986707] env[63345]: INFO nova.compute.manager [req-12f1d9a4-b0d4-4945-b805-8597df733d2a req-54b6b3e4-25d4-4ae7-ab9b-8c6231d06c02 service nova] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Neutron deleted interface 8a3e5f64-f812-4c1b-a9e0-b8b3146a1467; detaching it from the instance and deleting it from the info cache [ 1057.986945] env[63345]: DEBUG nova.network.neutron [req-12f1d9a4-b0d4-4945-b805-8597df733d2a req-54b6b3e4-25d4-4ae7-ab9b-8c6231d06c02 service nova] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1057.995399] env[63345]: DEBUG oslo_vmware.api [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Task: {'id': 
session[52090a46-d3fa-1435-f12f-c4737ae78030]52fcdf5b-0361-6cf4-7e3a-f76e18a2c6d9, 'name': SearchDatastore_Task, 'duration_secs': 0.009857} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.996651] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-01e5e284-fb13-44c4-b755-898796d79d69 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.002738] env[63345]: DEBUG oslo_vmware.api [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Waiting for the task: (returnval){ [ 1058.002738] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]5297adec-b4fa-1459-55d7-7555567087a6" [ 1058.002738] env[63345]: _type = "Task" [ 1058.002738] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.012429] env[63345]: DEBUG oslo_vmware.api [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5297adec-b4fa-1459-55d7-7555567087a6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.149437] env[63345]: ERROR nova.scheduler.client.report [None req-9ae67186-5cb5-4cf4-b13b-93933d98b534 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [req-a9af1dc2-32ff-46ea-ad37-09ee512c77c6] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID fc35ddde-c15e-4ab8-bf77-a06ae0805b57. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-a9af1dc2-32ff-46ea-ad37-09ee512c77c6"}]} [ 1058.159476] env[63345]: DEBUG nova.compute.manager [req-bf4111e5-dcc3-4ec5-bc0c-48a27af4c59d req-d4568578-a614-4062-8378-63daa75f6cf0 service nova] [instance: b3e0831b-b8f1-40c4-be01-71ed6484dbc0] Received event network-vif-deleted-a4151246-fc54-4f99-9110-a65de119bea6 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1058.159856] env[63345]: INFO nova.compute.manager [req-bf4111e5-dcc3-4ec5-bc0c-48a27af4c59d req-d4568578-a614-4062-8378-63daa75f6cf0 service nova] [instance: b3e0831b-b8f1-40c4-be01-71ed6484dbc0] Neutron deleted interface a4151246-fc54-4f99-9110-a65de119bea6; detaching it from the instance and deleting it from the info cache [ 1058.159856] env[63345]: DEBUG nova.network.neutron [req-bf4111e5-dcc3-4ec5-bc0c-48a27af4c59d req-d4568578-a614-4062-8378-63daa75f6cf0 service nova] [instance: b3e0831b-b8f1-40c4-be01-71ed6484dbc0] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1058.165935] env[63345]: DEBUG nova.scheduler.client.report [None req-9ae67186-5cb5-4cf4-b13b-93933d98b534 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Refreshing inventories for resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1058.179275] env[63345]: DEBUG nova.scheduler.client.report [None req-9ae67186-5cb5-4cf4-b13b-93933d98b534 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Updating ProviderTree inventory for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1058.179503] env[63345]: DEBUG nova.compute.provider_tree [None req-9ae67186-5cb5-4cf4-b13b-93933d98b534 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1058.190823] env[63345]: DEBUG nova.scheduler.client.report [None req-9ae67186-5cb5-4cf4-b13b-93933d98b534 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Refreshing aggregate associations for resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57, aggregates: None {{(pid=63345) _refresh_associations 
/opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1058.213151] env[63345]: DEBUG nova.scheduler.client.report [None req-9ae67186-5cb5-4cf4-b13b-93933d98b534 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Refreshing trait associations for resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=63345) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1058.238944] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c66418e-4fb8-4726-875b-a3a613729b80 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.258899] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd0c8a70-1f71-4873-ad82-8793d3d6a2d7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.278523] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-4e036ee5-bb70-4754-a5fc-c53eb54712f9 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Updating instance '726332dd-8699-49a4-a9ea-b9cbfc159855' progress to 67 {{(pid=63345) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1058.440079] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f770c36-8612-4445-87a8-a1a1d9f82c6d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.451348] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc0c1701-c950-4ca7-bca2-3fe17f838ed6 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.482116] env[63345]: DEBUG nova.network.neutron [-] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1058.484181] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e84bfc9-410b-4955-82d8-dd4df8a001d4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.491786] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c88286bc-5d00-4b40-8b96-31af26a3e1e3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.497471] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb39a587-7eaa-4aee-bb59-9cef27e6f134 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.516036] env[63345]: DEBUG nova.compute.provider_tree [None req-9ae67186-5cb5-4cf4-b13b-93933d98b534 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 
'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1058.520688] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5e47803-256b-49e8-b240-158997197f5f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.535218] env[63345]: DEBUG oslo_vmware.api [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5297adec-b4fa-1459-55d7-7555567087a6, 'name': SearchDatastore_Task, 'duration_secs': 0.02847} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.535674] env[63345]: DEBUG oslo_concurrency.lockutils [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1058.536653] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 5cefe8a6-4af0-47d4-84f5-1d579d0c9968/5cefe8a6-4af0-47d4-84f5-1d579d0c9968.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1058.536653] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5b5cab91-3a9b-48be-ac05-f2525b4f57ac {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.544213] env[63345]: DEBUG oslo_vmware.api [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Waiting for the task: (returnval){ [ 1058.544213] env[63345]: value = "task-1017807" [ 1058.544213] env[63345]: _type = "Task" [ 1058.544213] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.559944] env[63345]: DEBUG nova.compute.manager [req-12f1d9a4-b0d4-4945-b805-8597df733d2a req-54b6b3e4-25d4-4ae7-ab9b-8c6231d06c02 service nova] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Detach interface failed, port_id=8a3e5f64-f812-4c1b-a9e0-b8b3146a1467, reason: Instance dd624e54-bd5b-4660-88a1-9d6f36560421 could not be found. {{(pid=63345) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 1058.563256] env[63345]: DEBUG oslo_vmware.api [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Task: {'id': task-1017807, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.638148] env[63345]: DEBUG nova.network.neutron [-] [instance: b3e0831b-b8f1-40c4-be01-71ed6484dbc0] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1058.662551] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0f731922-bcd7-48d2-98fc-77e4abacf901 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.673741] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b80b499-6120-4f31-8b83-2c25e1a6331b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.684881] env[63345]: DEBUG nova.network.neutron [None req-6a788111-27eb-4d7f-9dfc-c2ac09935a02 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Updating instance_info_cache with network_info: [{"id": "4829f314-cace-49cc-b77a-016ee4b1c7e6", "address": "fa:16:3e:27:64:94", "network": {"id": "18b67684-3f06-4f15-be40-ba0b2769b248", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1680877425-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.200", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cb91ecf5d00e48dea9baf2122ac4fed7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "68add7d6-c025-46fa-84d3-9c589adb63e4", "external-id": "nsx-vlan-transportzone-961", "segmentation_id": 961, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4829f314-ca", "ovs_interfaceid": "4829f314-cace-49cc-b77a-016ee4b1c7e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1058.708729] env[63345]: DEBUG nova.compute.manager [req-bf4111e5-dcc3-4ec5-bc0c-48a27af4c59d req-d4568578-a614-4062-8378-63daa75f6cf0 service nova] [instance: b3e0831b-b8f1-40c4-be01-71ed6484dbc0] Detach interface failed, port_id=a4151246-fc54-4f99-9110-a65de119bea6, reason: Instance b3e0831b-b8f1-40c4-be01-71ed6484dbc0 could not be found. 
{{(pid=63345) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 1058.823145] env[63345]: DEBUG nova.network.neutron [None req-4e036ee5-bb70-4754-a5fc-c53eb54712f9 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Port 114e38e0-a558-4242-ad5b-4aac063dcb72 binding to destination host cpu-1 is already ACTIVE {{(pid=63345) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3171}} [ 1058.987759] env[63345]: INFO nova.compute.manager [-] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Took 1.30 seconds to deallocate network for instance. [ 1059.055494] env[63345]: DEBUG oslo_vmware.api [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Task: {'id': task-1017807, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.50363} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.055790] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 5cefe8a6-4af0-47d4-84f5-1d579d0c9968/5cefe8a6-4af0-47d4-84f5-1d579d0c9968.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1059.056065] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] [instance: 5cefe8a6-4af0-47d4-84f5-1d579d0c9968] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1059.056346] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3808bef9-d57e-423d-aae6-d470c6f59dee {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.062512] env[63345]: DEBUG oslo_vmware.api [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Waiting for the task: (returnval){ [ 1059.062512] env[63345]: value = "task-1017808" [ 1059.062512] env[63345]: _type = "Task" [ 1059.062512] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1059.063412] env[63345]: DEBUG nova.scheduler.client.report [None req-9ae67186-5cb5-4cf4-b13b-93933d98b534 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Updated inventory for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with generation 149 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1059.063655] env[63345]: DEBUG nova.compute.provider_tree [None req-9ae67186-5cb5-4cf4-b13b-93933d98b534 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Updating resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 generation from 149 to 150 during operation: update_inventory {{(pid=63345) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1059.063864] env[63345]: DEBUG nova.compute.provider_tree [None req-9ae67186-5cb5-4cf4-b13b-93933d98b534 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1059.075784] env[63345]: DEBUG oslo_vmware.api [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Task: {'id': task-1017808, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.141342] env[63345]: INFO nova.compute.manager [-] [instance: b3e0831b-b8f1-40c4-be01-71ed6484dbc0] Took 1.21 seconds to deallocate network for instance. 
[ 1059.187947] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6a788111-27eb-4d7f-9dfc-c2ac09935a02 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Releasing lock "refresh_cache-bce78147-6f6d-47a2-84f3-482f59a8bb8e" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1059.494912] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fb09a56d-023a-4ce7-b2d5-445863df611c tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1059.568579] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9ae67186-5cb5-4cf4-b13b-93933d98b534 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.249s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1059.574504] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fb09a56d-023a-4ce7-b2d5-445863df611c tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.080s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1059.574782] env[63345]: DEBUG nova.objects.instance [None req-fb09a56d-023a-4ce7-b2d5-445863df611c tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Lazy-loading 'resources' on Instance uuid dd624e54-bd5b-4660-88a1-9d6f36560421 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1059.581674] env[63345]: DEBUG oslo_vmware.api [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Task: {'id': task-1017808, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.059609} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.582821] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] [instance: 5cefe8a6-4af0-47d4-84f5-1d579d0c9968] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1059.583833] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d00f0793-e975-4db1-99d0-069bc16b9ddd {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.587987] env[63345]: INFO nova.scheduler.client.report [None req-9ae67186-5cb5-4cf4-b13b-93933d98b534 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Deleted allocations for instance da3408a0-cce7-4252-be47-097f081d83c1 [ 1059.606053] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] [instance: 5cefe8a6-4af0-47d4-84f5-1d579d0c9968] Reconfiguring VM instance instance-0000006d to attach disk [datastore2] 5cefe8a6-4af0-47d4-84f5-1d579d0c9968/5cefe8a6-4af0-47d4-84f5-1d579d0c9968.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1059.608828] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-665e0768-ae00-46c7-8dbd-afcbd81505b7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.643710] env[63345]: DEBUG oslo_vmware.api [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Waiting for the task: (returnval){ [ 1059.643710] env[63345]: value = "task-1017809" [ 1059.643710] env[63345]: _type = "Task" [ 1059.643710] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1059.648248] env[63345]: DEBUG oslo_concurrency.lockutils [None req-500756a4-c220-4c65-b483-0c39e8abc952 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1059.652494] env[63345]: DEBUG oslo_vmware.api [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Task: {'id': task-1017809, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.740118] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1059.740352] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1059.740516] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Starting heal instance info cache {{(pid=63345) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10257}} [ 1059.846339] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4e036ee5-bb70-4754-a5fc-c53eb54712f9 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Acquiring lock "726332dd-8699-49a4-a9ea-b9cbfc159855-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1059.846582] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4e036ee5-bb70-4754-a5fc-c53eb54712f9 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lock "726332dd-8699-49a4-a9ea-b9cbfc159855-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1059.846778] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4e036ee5-bb70-4754-a5fc-c53eb54712f9 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lock "726332dd-8699-49a4-a9ea-b9cbfc159855-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1060.126740] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9ae67186-5cb5-4cf4-b13b-93933d98b534 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Lock "da3408a0-cce7-4252-be47-097f081d83c1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.358s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1060.153805] env[63345]: DEBUG oslo_vmware.api [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Task: {'id': task-1017809, 'name': ReconfigVM_Task, 'duration_secs': 0.337098} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1060.154179] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] [instance: 5cefe8a6-4af0-47d4-84f5-1d579d0c9968] Reconfigured VM instance instance-0000006d to attach disk [datastore2] 5cefe8a6-4af0-47d4-84f5-1d579d0c9968/5cefe8a6-4af0-47d4-84f5-1d579d0c9968.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1060.154775] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-747e7712-5fc2-4f94-b31e-684a39249313 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.178670] env[63345]: DEBUG oslo_vmware.api [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Waiting for the task: (returnval){ [ 1060.178670] env[63345]: value = "task-1017810" [ 1060.178670] env[63345]: _type = "Task" [ 1060.178670] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.188385] env[63345]: DEBUG oslo_vmware.api [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Task: {'id': task-1017810, 'name': Rename_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.257513] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0923ff3-4057-49c1-85c9-79a2eb8cbeea {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.266516] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57c221b4-ce45-4884-aaf0-d59a5b0282ca {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.300887] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e710b645-2c01-4c68-9028-3da081aea74d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.308244] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24ee8739-ab09-49d4-ae9d-09a4710a7d6d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.323015] env[63345]: DEBUG nova.compute.provider_tree [None req-fb09a56d-023a-4ce7-b2d5-445863df611c tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1060.615996] env[63345]: 
DEBUG oslo_concurrency.lockutils [None req-7830a925-19f9-4416-927e-517b7b3aa6d4 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Acquiring lock "0f3f59b6-e7bc-4657-af5f-eec18efc3666" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1060.616304] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7830a925-19f9-4416-927e-517b7b3aa6d4 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Lock "0f3f59b6-e7bc-4657-af5f-eec18efc3666" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1060.616549] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7830a925-19f9-4416-927e-517b7b3aa6d4 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Acquiring lock "0f3f59b6-e7bc-4657-af5f-eec18efc3666-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1060.616726] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7830a925-19f9-4416-927e-517b7b3aa6d4 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Lock "0f3f59b6-e7bc-4657-af5f-eec18efc3666-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1060.617257] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7830a925-19f9-4416-927e-517b7b3aa6d4 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Lock "0f3f59b6-e7bc-4657-af5f-eec18efc3666-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1060.619211] env[63345]: INFO nova.compute.manager [None req-7830a925-19f9-4416-927e-517b7b3aa6d4 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: 0f3f59b6-e7bc-4657-af5f-eec18efc3666] Terminating instance [ 1060.688810] env[63345]: DEBUG oslo_vmware.api [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Task: {'id': task-1017810, 'name': Rename_Task, 'duration_secs': 0.161602} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1060.689110] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] [instance: 5cefe8a6-4af0-47d4-84f5-1d579d0c9968] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1060.689363] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b9d98be3-4480-4346-8238-5b0cd01ae520 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.696656] env[63345]: DEBUG oslo_vmware.api [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Waiting for the task: (returnval){ [ 1060.696656] env[63345]: value = "task-1017811" [ 1060.696656] env[63345]: _type = "Task" [ 1060.696656] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.703532] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b25b7a77-7490-498a-88a8-1bc21f5dbfd9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.708712] env[63345]: DEBUG oslo_vmware.api [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Task: {'id': task-1017811, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.725328] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-6a788111-27eb-4d7f-9dfc-c2ac09935a02 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Updating instance 'bce78147-6f6d-47a2-84f3-482f59a8bb8e' progress to 0 {{(pid=63345) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1060.854903] env[63345]: DEBUG nova.scheduler.client.report [None req-fb09a56d-023a-4ce7-b2d5-445863df611c tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Updated inventory for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with generation 150 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1060.855197] env[63345]: DEBUG nova.compute.provider_tree [None req-fb09a56d-023a-4ce7-b2d5-445863df611c tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Updating resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 generation from 150 to 151 during operation: update_inventory {{(pid=63345) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1060.855391] env[63345]: DEBUG nova.compute.provider_tree [None req-fb09a56d-023a-4ce7-b2d5-445863df611c 
tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1060.886923] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4e036ee5-bb70-4754-a5fc-c53eb54712f9 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Acquiring lock "refresh_cache-726332dd-8699-49a4-a9ea-b9cbfc159855" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1060.887158] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4e036ee5-bb70-4754-a5fc-c53eb54712f9 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Acquired lock "refresh_cache-726332dd-8699-49a4-a9ea-b9cbfc159855" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1060.887341] env[63345]: DEBUG nova.network.neutron [None req-4e036ee5-bb70-4754-a5fc-c53eb54712f9 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1061.124799] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7830a925-19f9-4416-927e-517b7b3aa6d4 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Acquiring lock "refresh_cache-0f3f59b6-e7bc-4657-af5f-eec18efc3666" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1061.124973] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7830a925-19f9-4416-927e-517b7b3aa6d4 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Acquired lock "refresh_cache-0f3f59b6-e7bc-4657-af5f-eec18efc3666" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1061.125199] env[63345]: DEBUG nova.network.neutron [None req-7830a925-19f9-4416-927e-517b7b3aa6d4 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: 0f3f59b6-e7bc-4657-af5f-eec18efc3666] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1061.207074] env[63345]: DEBUG oslo_vmware.api [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Task: {'id': task-1017811, 'name': PowerOnVM_Task, 'duration_secs': 0.409962} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.207366] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] [instance: 5cefe8a6-4af0-47d4-84f5-1d579d0c9968] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1061.207575] env[63345]: DEBUG nova.compute.manager [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] [instance: 5cefe8a6-4af0-47d4-84f5-1d579d0c9968] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1061.208340] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a3e9909-758a-42b7-88b1-051be0da85b1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.231593] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a788111-27eb-4d7f-9dfc-c2ac09935a02 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1061.231892] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-62fc8ddb-6f56-416c-98c3-195782de5c0b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.239328] env[63345]: DEBUG oslo_vmware.api [None req-6a788111-27eb-4d7f-9dfc-c2ac09935a02 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 1061.239328] env[63345]: value = "task-1017812" [ 1061.239328] env[63345]: _type = "Task" [ 1061.239328] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.247434] env[63345]: DEBUG oslo_vmware.api [None req-6a788111-27eb-4d7f-9dfc-c2ac09935a02 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017812, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.361805] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fb09a56d-023a-4ce7-b2d5-445863df611c tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.787s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1061.364590] env[63345]: DEBUG oslo_concurrency.lockutils [None req-500756a4-c220-4c65-b483-0c39e8abc952 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.716s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1061.365102] env[63345]: DEBUG nova.objects.instance [None req-500756a4-c220-4c65-b483-0c39e8abc952 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lazy-loading 'resources' on Instance uuid b3e0831b-b8f1-40c4-be01-71ed6484dbc0 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1061.379268] env[63345]: INFO nova.scheduler.client.report [None req-fb09a56d-023a-4ce7-b2d5-445863df611c tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Deleted allocations for instance dd624e54-bd5b-4660-88a1-9d6f36560421 [ 1061.643717] env[63345]: DEBUG nova.network.neutron [None req-4e036ee5-bb70-4754-a5fc-c53eb54712f9 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Updating instance_info_cache with network_info: [{"id": "114e38e0-a558-4242-ad5b-4aac063dcb72", "address": "fa:16:3e:bb:2c:f8", "network": {"id": "dffa0b34-9323-42eb-aeb1-e32aebcb75c8", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1826417035-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.227", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "57e386920081487583ea143003aca8c4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "94e1d797-8eb2-4400-9f7d-f2eb60eb4cf2", "external-id": "nsx-vlan-transportzone-828", "segmentation_id": 828, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap114e38e0-a5", "ovs_interfaceid": "114e38e0-a558-4242-ad5b-4aac063dcb72", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1061.645769] env[63345]: DEBUG nova.network.neutron [None req-7830a925-19f9-4416-927e-517b7b3aa6d4 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: 0f3f59b6-e7bc-4657-af5f-eec18efc3666] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1061.693835] env[63345]: DEBUG nova.network.neutron [None req-7830a925-19f9-4416-927e-517b7b3aa6d4 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: 0f3f59b6-e7bc-4657-af5f-eec18efc3666] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1061.726750] env[63345]: DEBUG oslo_concurrency.lockutils [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1061.749870] env[63345]: DEBUG oslo_vmware.api [None req-6a788111-27eb-4d7f-9dfc-c2ac09935a02 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017812, 'name': PowerOffVM_Task, 'duration_secs': 0.195673} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.750167] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a788111-27eb-4d7f-9dfc-c2ac09935a02 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1061.750363] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-6a788111-27eb-4d7f-9dfc-c2ac09935a02 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Updating instance 'bce78147-6f6d-47a2-84f3-482f59a8bb8e' progress to 17 {{(pid=63345) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1061.886544] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fb09a56d-023a-4ce7-b2d5-445863df611c tempest-AttachInterfacesTestJSON-1256861117 tempest-AttachInterfacesTestJSON-1256861117-project-member] Lock "dd624e54-bd5b-4660-88a1-9d6f36560421" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.857s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1062.005679] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f72f8512-54fe-4406-a4ee-0321b88648d3 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Acquiring lock "5cefe8a6-4af0-47d4-84f5-1d579d0c9968" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1062.007033] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f72f8512-54fe-4406-a4ee-0321b88648d3 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Lock "5cefe8a6-4af0-47d4-84f5-1d579d0c9968" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1062.007033] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f72f8512-54fe-4406-a4ee-0321b88648d3 
tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Acquiring lock "5cefe8a6-4af0-47d4-84f5-1d579d0c9968-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1062.007033] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f72f8512-54fe-4406-a4ee-0321b88648d3 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Lock "5cefe8a6-4af0-47d4-84f5-1d579d0c9968-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1062.007033] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f72f8512-54fe-4406-a4ee-0321b88648d3 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Lock "5cefe8a6-4af0-47d4-84f5-1d579d0c9968-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1062.008796] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fa0039f-ad3c-4c74-a786-89c2f38cb5bd {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.011519] env[63345]: INFO nova.compute.manager [None req-f72f8512-54fe-4406-a4ee-0321b88648d3 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] [instance: 5cefe8a6-4af0-47d4-84f5-1d579d0c9968] Terminating instance [ 1062.019933] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f2207e4-698a-4d89-9421-0de94066b113 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.050522] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3325ded9-829d-40de-bb05-629ccf1d6fc6 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.057818] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-592575d2-19c5-4ac1-9f19-b0193d8c1b5e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.071154] env[63345]: DEBUG nova.compute.provider_tree [None req-500756a4-c220-4c65-b483-0c39e8abc952 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1062.149307] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4e036ee5-bb70-4754-a5fc-c53eb54712f9 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Releasing lock "refresh_cache-726332dd-8699-49a4-a9ea-b9cbfc159855" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1062.197071] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7830a925-19f9-4416-927e-517b7b3aa6d4 tempest-ServerShowV247Test-1493692582 
tempest-ServerShowV247Test-1493692582-project-member] Releasing lock "refresh_cache-0f3f59b6-e7bc-4657-af5f-eec18efc3666" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1062.197567] env[63345]: DEBUG nova.compute.manager [None req-7830a925-19f9-4416-927e-517b7b3aa6d4 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: 0f3f59b6-e7bc-4657-af5f-eec18efc3666] Start destroying the instance on the hypervisor. {{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 1062.197820] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-7830a925-19f9-4416-927e-517b7b3aa6d4 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: 0f3f59b6-e7bc-4657-af5f-eec18efc3666] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1062.199356] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94502d5d-bf28-4975-a550-7ff538d9e975 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.207455] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-7830a925-19f9-4416-927e-517b7b3aa6d4 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: 0f3f59b6-e7bc-4657-af5f-eec18efc3666] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1062.207704] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-77bc364c-710f-4ea1-91c5-9b5d5d42303b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.214111] env[63345]: DEBUG oslo_vmware.api [None req-7830a925-19f9-4416-927e-517b7b3aa6d4 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Waiting for the task: (returnval){ [ 1062.214111] env[63345]: value = "task-1017813" [ 1062.214111] env[63345]: _type = "Task" [ 1062.214111] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.222237] env[63345]: DEBUG oslo_vmware.api [None req-7830a925-19f9-4416-927e-517b7b3aa6d4 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Task: {'id': task-1017813, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.259233] env[63345]: DEBUG nova.virt.hardware [None req-6a788111-27eb-4d7f-9dfc-c2ac09935a02 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1062.259635] env[63345]: DEBUG nova.virt.hardware [None req-6a788111-27eb-4d7f-9dfc-c2ac09935a02 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1062.259899] env[63345]: DEBUG nova.virt.hardware [None req-6a788111-27eb-4d7f-9dfc-c2ac09935a02 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1062.260253] env[63345]: DEBUG nova.virt.hardware [None req-6a788111-27eb-4d7f-9dfc-c2ac09935a02 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1062.260514] env[63345]: DEBUG nova.virt.hardware [None req-6a788111-27eb-4d7f-9dfc-c2ac09935a02 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1062.260790] env[63345]: DEBUG nova.virt.hardware [None req-6a788111-27eb-4d7f-9dfc-c2ac09935a02 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1062.261165] env[63345]: DEBUG nova.virt.hardware [None req-6a788111-27eb-4d7f-9dfc-c2ac09935a02 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1062.261450] env[63345]: DEBUG nova.virt.hardware [None req-6a788111-27eb-4d7f-9dfc-c2ac09935a02 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1062.261740] env[63345]: DEBUG nova.virt.hardware [None req-6a788111-27eb-4d7f-9dfc-c2ac09935a02 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Got 1 possible topologies {{(pid=63345) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1062.262038] env[63345]: DEBUG nova.virt.hardware [None req-6a788111-27eb-4d7f-9dfc-c2ac09935a02 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1062.262361] env[63345]: DEBUG nova.virt.hardware [None req-6a788111-27eb-4d7f-9dfc-c2ac09935a02 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1062.270928] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Didn't find any instances for network info cache update. {{(pid=63345) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10343}} [ 1062.271770] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d1b3fe80-c126-46bc-9256-dd4352c92437 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.290194] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1062.290357] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1062.291411] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1062.292030] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1062.292030] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1062.292255] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1062.292993] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63345) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10876}} [ 1062.292993] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager.update_available_resource {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1062.297955] env[63345]: DEBUG oslo_vmware.api [None req-6a788111-27eb-4d7f-9dfc-c2ac09935a02 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 1062.297955] env[63345]: value = "task-1017814" [ 1062.297955] env[63345]: _type = "Task" [ 1062.297955] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.309324] env[63345]: DEBUG oslo_vmware.api [None req-6a788111-27eb-4d7f-9dfc-c2ac09935a02 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017814, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.516490] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f72f8512-54fe-4406-a4ee-0321b88648d3 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Acquiring lock "refresh_cache-5cefe8a6-4af0-47d4-84f5-1d579d0c9968" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1062.516711] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f72f8512-54fe-4406-a4ee-0321b88648d3 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Acquired lock "refresh_cache-5cefe8a6-4af0-47d4-84f5-1d579d0c9968" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1062.516913] env[63345]: DEBUG nova.network.neutron [None req-f72f8512-54fe-4406-a4ee-0321b88648d3 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] [instance: 5cefe8a6-4af0-47d4-84f5-1d579d0c9968] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1062.573911] env[63345]: DEBUG nova.scheduler.client.report [None req-500756a4-c220-4c65-b483-0c39e8abc952 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1062.677037] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00736cfd-cf66-4aa7-a43b-8fb532daea1e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.696871] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-512b0d4b-fa26-47f6-912c-6b3d1fa79d7e {{(pid=63345) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.704180] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-4e036ee5-bb70-4754-a5fc-c53eb54712f9 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Updating instance '726332dd-8699-49a4-a9ea-b9cbfc159855' progress to 83 {{(pid=63345) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1062.723730] env[63345]: DEBUG oslo_vmware.api [None req-7830a925-19f9-4416-927e-517b7b3aa6d4 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Task: {'id': task-1017813, 'name': PowerOffVM_Task, 'duration_secs': 0.136553} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.723994] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-7830a925-19f9-4416-927e-517b7b3aa6d4 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: 0f3f59b6-e7bc-4657-af5f-eec18efc3666] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1062.724183] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-7830a925-19f9-4416-927e-517b7b3aa6d4 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: 0f3f59b6-e7bc-4657-af5f-eec18efc3666] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1062.724422] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-407657f3-5d4b-42a3-b0d0-c10f789e958f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.754366] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-7830a925-19f9-4416-927e-517b7b3aa6d4 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: 0f3f59b6-e7bc-4657-af5f-eec18efc3666] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1062.754586] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-7830a925-19f9-4416-927e-517b7b3aa6d4 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: 0f3f59b6-e7bc-4657-af5f-eec18efc3666] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1062.754784] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-7830a925-19f9-4416-927e-517b7b3aa6d4 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Deleting the datastore file [datastore2] 0f3f59b6-e7bc-4657-af5f-eec18efc3666 {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1062.755098] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1bb3f689-c325-4877-beff-a1727d5dd32b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.762508] env[63345]: DEBUG oslo_vmware.api [None req-7830a925-19f9-4416-927e-517b7b3aa6d4 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Waiting for the task: (returnval){ [ 1062.762508] env[63345]: value = "task-1017816" [ 1062.762508] env[63345]: _type = "Task" [ 1062.762508] 
env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.772197] env[63345]: DEBUG oslo_vmware.api [None req-7830a925-19f9-4416-927e-517b7b3aa6d4 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Task: {'id': task-1017816, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.795904] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1062.812045] env[63345]: DEBUG oslo_vmware.api [None req-6a788111-27eb-4d7f-9dfc-c2ac09935a02 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017814, 'name': ReconfigVM_Task, 'duration_secs': 0.159261} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.812497] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-6a788111-27eb-4d7f-9dfc-c2ac09935a02 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Updating instance 'bce78147-6f6d-47a2-84f3-482f59a8bb8e' progress to 33 {{(pid=63345) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1063.035335] env[63345]: DEBUG nova.network.neutron [None req-f72f8512-54fe-4406-a4ee-0321b88648d3 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] [instance: 5cefe8a6-4af0-47d4-84f5-1d579d0c9968] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1063.079262] env[63345]: DEBUG oslo_concurrency.lockutils [None req-500756a4-c220-4c65-b483-0c39e8abc952 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.715s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1063.081506] env[63345]: DEBUG oslo_concurrency.lockutils [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 1.355s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1063.081709] env[63345]: DEBUG nova.objects.instance [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] [instance: 5cefe8a6-4af0-47d4-84f5-1d579d0c9968] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63345) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1063.099462] env[63345]: INFO nova.scheduler.client.report [None req-500756a4-c220-4c65-b483-0c39e8abc952 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Deleted allocations for instance b3e0831b-b8f1-40c4-be01-71ed6484dbc0 [ 1063.130710] env[63345]: DEBUG nova.network.neutron [None req-f72f8512-54fe-4406-a4ee-0321b88648d3 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] [instance: 5cefe8a6-4af0-47d4-84f5-1d579d0c9968] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1063.210424] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-4e036ee5-bb70-4754-a5fc-c53eb54712f9 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Updating instance '726332dd-8699-49a4-a9ea-b9cbfc159855' progress to 100 {{(pid=63345) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1063.274247] env[63345]: DEBUG oslo_vmware.api [None req-7830a925-19f9-4416-927e-517b7b3aa6d4 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Task: {'id': task-1017816, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.087539} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.274534] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-7830a925-19f9-4416-927e-517b7b3aa6d4 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1063.274727] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-7830a925-19f9-4416-927e-517b7b3aa6d4 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: 0f3f59b6-e7bc-4657-af5f-eec18efc3666] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1063.274912] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-7830a925-19f9-4416-927e-517b7b3aa6d4 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: 0f3f59b6-e7bc-4657-af5f-eec18efc3666] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1063.275103] env[63345]: INFO nova.compute.manager [None req-7830a925-19f9-4416-927e-517b7b3aa6d4 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] [instance: 0f3f59b6-e7bc-4657-af5f-eec18efc3666] Took 1.08 seconds to destroy the instance on the hypervisor. [ 1063.275378] env[63345]: DEBUG oslo.service.loopingcall [None req-7830a925-19f9-4416-927e-517b7b3aa6d4 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1063.275673] env[63345]: DEBUG nova.compute.manager [-] [instance: 0f3f59b6-e7bc-4657-af5f-eec18efc3666] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 1063.275770] env[63345]: DEBUG nova.network.neutron [-] [instance: 0f3f59b6-e7bc-4657-af5f-eec18efc3666] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1063.296377] env[63345]: DEBUG nova.network.neutron [-] [instance: 0f3f59b6-e7bc-4657-af5f-eec18efc3666] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1063.322239] env[63345]: DEBUG nova.virt.hardware [None req-6a788111-27eb-4d7f-9dfc-c2ac09935a02 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1063.322513] env[63345]: DEBUG nova.virt.hardware [None req-6a788111-27eb-4d7f-9dfc-c2ac09935a02 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1063.322670] env[63345]: DEBUG nova.virt.hardware [None req-6a788111-27eb-4d7f-9dfc-c2ac09935a02 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1063.322859] env[63345]: DEBUG nova.virt.hardware [None req-6a788111-27eb-4d7f-9dfc-c2ac09935a02 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1063.323228] env[63345]: DEBUG nova.virt.hardware [None req-6a788111-27eb-4d7f-9dfc-c2ac09935a02 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1063.323428] env[63345]: DEBUG nova.virt.hardware [None req-6a788111-27eb-4d7f-9dfc-c2ac09935a02 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1063.324568] env[63345]: DEBUG nova.virt.hardware [None req-6a788111-27eb-4d7f-9dfc-c2ac09935a02 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1063.324568] env[63345]: DEBUG nova.virt.hardware [None req-6a788111-27eb-4d7f-9dfc-c2ac09935a02 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1063.324568] env[63345]: DEBUG nova.virt.hardware [None req-6a788111-27eb-4d7f-9dfc-c2ac09935a02 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Got 1 possible topologies {{(pid=63345) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1063.324568] env[63345]: DEBUG nova.virt.hardware [None req-6a788111-27eb-4d7f-9dfc-c2ac09935a02 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1063.324568] env[63345]: DEBUG nova.virt.hardware [None req-6a788111-27eb-4d7f-9dfc-c2ac09935a02 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1063.608758] env[63345]: DEBUG oslo_concurrency.lockutils [None req-500756a4-c220-4c65-b483-0c39e8abc952 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lock "b3e0831b-b8f1-40c4-be01-71ed6484dbc0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.825s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1063.634676] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f72f8512-54fe-4406-a4ee-0321b88648d3 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Releasing lock "refresh_cache-5cefe8a6-4af0-47d4-84f5-1d579d0c9968" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1063.636961] env[63345]: DEBUG nova.compute.manager [None req-f72f8512-54fe-4406-a4ee-0321b88648d3 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] [instance: 5cefe8a6-4af0-47d4-84f5-1d579d0c9968] Start destroying the instance on the hypervisor. 
{{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 1063.637189] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-f72f8512-54fe-4406-a4ee-0321b88648d3 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] [instance: 5cefe8a6-4af0-47d4-84f5-1d579d0c9968] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1063.640084] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2aaeb2e-556d-460c-9535-8066c3ba1046 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.652389] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-f72f8512-54fe-4406-a4ee-0321b88648d3 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] [instance: 5cefe8a6-4af0-47d4-84f5-1d579d0c9968] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1063.653022] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d6a49026-15f1-4d72-bbd2-b96828822816 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.662137] env[63345]: DEBUG oslo_vmware.api [None req-f72f8512-54fe-4406-a4ee-0321b88648d3 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Waiting for the task: (returnval){ [ 1063.662137] env[63345]: value = "task-1017817" [ 1063.662137] env[63345]: _type = "Task" [ 1063.662137] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.672260] env[63345]: DEBUG oslo_vmware.api [None req-f72f8512-54fe-4406-a4ee-0321b88648d3 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Task: {'id': task-1017817, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.798664] env[63345]: DEBUG nova.network.neutron [-] [instance: 0f3f59b6-e7bc-4657-af5f-eec18efc3666] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1063.834738] env[63345]: ERROR nova.compute.manager [None req-6a788111-27eb-4d7f-9dfc-c2ac09935a02 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Setting instance vm_state to ERROR: AttributeError: 'NoneType' object has no attribute 'key' [ 1063.834738] env[63345]: ERROR nova.compute.manager [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Traceback (most recent call last): [ 1063.834738] env[63345]: ERROR nova.compute.manager [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] File "/opt/stack/nova/nova/compute/manager.py", line 11194, in _error_out_instance_on_exception [ 1063.834738] env[63345]: ERROR nova.compute.manager [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] yield [ 1063.834738] env[63345]: ERROR nova.compute.manager [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] File "/opt/stack/nova/nova/compute/manager.py", line 6434, in _resize_instance [ 1063.834738] env[63345]: ERROR nova.compute.manager [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] disk_info = self.driver.migrate_disk_and_power_off( [ 1063.834738] env[63345]: ERROR nova.compute.manager [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 266, in migrate_disk_and_power_off [ 1063.834738] env[63345]: ERROR nova.compute.manager [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] return self._vmops.migrate_disk_and_power_off(context, instance, [ 1063.834738] env[63345]: ERROR nova.compute.manager [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 1467, in migrate_disk_and_power_off [ 1063.834738] env[63345]: ERROR nova.compute.manager [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] self._resize_disk(instance, vm_ref, vmdk, flavor) [ 1063.834738] env[63345]: ERROR nova.compute.manager [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 1398, in _resize_disk [ 1063.834738] env[63345]: ERROR nova.compute.manager [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] self._volumeops.detach_disk_from_vm(vm_ref, instance, vmdk.device) [ 1063.834738] env[63345]: ERROR nova.compute.manager [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 121, in detach_disk_from_vm [ 1063.834738] env[63345]: ERROR nova.compute.manager [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] disk_key = device.key [ 1063.834738] env[63345]: ERROR nova.compute.manager [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] AttributeError: 'NoneType' object has no attribute 'key' [ 1063.834738] env[63345]: ERROR nova.compute.manager [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] [ 1064.091888] env[63345]: DEBUG oslo_concurrency.lockutils [None req-e1bafc89-cab2-45d4-8338-1354f102d65c tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.010s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 
1064.093158] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 1.297s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1064.094432] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1064.094432] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63345) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1064.097196] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7fb292b-d1be-41e0-a567-51b18b617352 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.108548] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-160f7dcb-75ac-4fb0-bc5a-6a1b2635ff17 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.125576] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1efc3350-9258-4a0a-9899-7e8ddcfe38de {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.132988] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-429193b4-2e10-496e-a9d1-3f996af62406 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.163932] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179885MB free_disk=187GB free_vcpus=48 pci_devices=None {{(pid=63345) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1064.164122] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1064.164328] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1064.178774] env[63345]: DEBUG oslo_vmware.api [None req-f72f8512-54fe-4406-a4ee-0321b88648d3 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Task: {'id': task-1017817, 'name': PowerOffVM_Task, 'duration_secs': 0.130003} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.179053] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-f72f8512-54fe-4406-a4ee-0321b88648d3 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] [instance: 5cefe8a6-4af0-47d4-84f5-1d579d0c9968] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1064.179241] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-f72f8512-54fe-4406-a4ee-0321b88648d3 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] [instance: 5cefe8a6-4af0-47d4-84f5-1d579d0c9968] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1064.179490] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9c742dd9-13c1-4f2a-b951-026e4f0fb497 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.213168] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-f72f8512-54fe-4406-a4ee-0321b88648d3 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] [instance: 5cefe8a6-4af0-47d4-84f5-1d579d0c9968] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1064.213417] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-f72f8512-54fe-4406-a4ee-0321b88648d3 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] [instance: 5cefe8a6-4af0-47d4-84f5-1d579d0c9968] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1064.213610] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-f72f8512-54fe-4406-a4ee-0321b88648d3 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Deleting the datastore file [datastore2] 5cefe8a6-4af0-47d4-84f5-1d579d0c9968 {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1064.214157] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-08994a9c-70f2-4b2a-b256-3746d1fc7f9b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.226539] env[63345]: DEBUG oslo_vmware.api [None req-f72f8512-54fe-4406-a4ee-0321b88648d3 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Waiting for the task: (returnval){ [ 1064.226539] env[63345]: value = "task-1017819" [ 1064.226539] env[63345]: _type = "Task" [ 1064.226539] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.237358] env[63345]: DEBUG oslo_vmware.api [None req-f72f8512-54fe-4406-a4ee-0321b88648d3 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Task: {'id': task-1017819, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.304381] env[63345]: INFO nova.compute.manager [-] [instance: 0f3f59b6-e7bc-4657-af5f-eec18efc3666] Took 1.03 seconds to deallocate network for instance. 
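The AttributeError traceback a few entries above (instance bce78147-6f6d-47a2-84f3-482f59a8bb8e) fails at volumeops.py:121, where detach_disk_from_vm does "disk_key = device.key" on a vmdk.device that is None, aborting migrate_disk_and_power_off during the resize. Below is a minimal, self-contained sketch of that failure mode; VmdkInfo and the two functions are simplified stand-ins for illustration only, not the actual nova.virt.vmwareapi implementations, and the None-guard is a hypothetical addition that the logged code path does not have.

    from collections import namedtuple

    # Simplified stand-in for the VMDK descriptor seen in the traceback; the real
    # object (nova.virt.vmwareapi.vm_util.VmdkInfo) carries the same 'device' field.
    VmdkInfo = namedtuple("VmdkInfo",
                          ["path", "adapter_type", "disk_type", "capacity_in_bytes", "device"])

    def detach_disk_from_vm(vm_ref, instance, device):
        # Mirrors the failing line from the traceback (volumeops.py:121):
        # the device is dereferenced without checking for None.
        disk_key = device.key            # AttributeError when device is None
        return disk_key

    def resize_disk(instance, vm_ref, vmdk, new_root_gb):
        # The resize path detaches the root disk before extending it; if no disk
        # device was matched for the VM, vmdk.device is None and the detach fails.
        if vmdk.device is None:
            # Hypothetical guard, not present in the code path shown in the log.
            raise ValueError("root disk device not found for instance %s" % instance)
        detach_disk_from_vm(vm_ref, instance, vmdk.device)

    if __name__ == "__main__":
        vmdk = VmdkInfo(path=None, adapter_type=None, disk_type=None,
                        capacity_in_bytes=0, device=None)
        try:
            detach_disk_from_vm("vm-123", "bce78147-6f6d-47a2-84f3-482f59a8bb8e", vmdk.device)
        except AttributeError as exc:
            print("reproduced:", exc)    # 'NoneType' object has no attribute 'key'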
[ 1064.356438] env[63345]: INFO nova.compute.manager [None req-6a788111-27eb-4d7f-9dfc-c2ac09935a02 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Swapping old allocation on dict_keys(['fc35ddde-c15e-4ab8-bf77-a06ae0805b57']) held by migration 0057592c-761e-46cb-9854-74b439bd8605 for instance [ 1064.381890] env[63345]: DEBUG nova.scheduler.client.report [None req-6a788111-27eb-4d7f-9dfc-c2ac09935a02 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Overwriting current allocation {'allocations': {'fc35ddde-c15e-4ab8-bf77-a06ae0805b57': {'resources': {'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 151}}, 'project_id': 'cb91ecf5d00e48dea9baf2122ac4fed7', 'user_id': 'dd618fef89a843209784ca9e925d18eb', 'consumer_generation': 1} on consumer bce78147-6f6d-47a2-84f3-482f59a8bb8e {{(pid=63345) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2033}} [ 1064.747651] env[63345]: DEBUG oslo_vmware.api [None req-f72f8512-54fe-4406-a4ee-0321b88648d3 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Task: {'id': task-1017819, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.084848} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.748318] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-f72f8512-54fe-4406-a4ee-0321b88648d3 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1064.748318] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-f72f8512-54fe-4406-a4ee-0321b88648d3 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] [instance: 5cefe8a6-4af0-47d4-84f5-1d579d0c9968] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1064.748318] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-f72f8512-54fe-4406-a4ee-0321b88648d3 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] [instance: 5cefe8a6-4af0-47d4-84f5-1d579d0c9968] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1064.751019] env[63345]: INFO nova.compute.manager [None req-f72f8512-54fe-4406-a4ee-0321b88648d3 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] [instance: 5cefe8a6-4af0-47d4-84f5-1d579d0c9968] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1064.751019] env[63345]: DEBUG oslo.service.loopingcall [None req-f72f8512-54fe-4406-a4ee-0321b88648d3 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1064.751019] env[63345]: DEBUG nova.compute.manager [-] [instance: 5cefe8a6-4af0-47d4-84f5-1d579d0c9968] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 1064.751019] env[63345]: DEBUG nova.network.neutron [-] [instance: 5cefe8a6-4af0-47d4-84f5-1d579d0c9968] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1064.777794] env[63345]: DEBUG nova.network.neutron [-] [instance: 5cefe8a6-4af0-47d4-84f5-1d579d0c9968] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1064.808481] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7830a925-19f9-4416-927e-517b7b3aa6d4 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1065.105894] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0e2f79e0-206b-4041-87a3-fc45db2d4918 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquiring lock "bce78147-6f6d-47a2-84f3-482f59a8bb8e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1065.105894] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0e2f79e0-206b-4041-87a3-fc45db2d4918 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lock "bce78147-6f6d-47a2-84f3-482f59a8bb8e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1065.106057] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0e2f79e0-206b-4041-87a3-fc45db2d4918 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquiring lock "bce78147-6f6d-47a2-84f3-482f59a8bb8e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1065.106247] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0e2f79e0-206b-4041-87a3-fc45db2d4918 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lock "bce78147-6f6d-47a2-84f3-482f59a8bb8e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1065.106392] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0e2f79e0-206b-4041-87a3-fc45db2d4918 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lock "bce78147-6f6d-47a2-84f3-482f59a8bb8e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1065.108514] env[63345]: INFO nova.compute.manager [None 
req-0e2f79e0-206b-4041-87a3-fc45db2d4918 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Terminating instance [ 1065.122299] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquiring lock "7245e83c-2dda-4b2f-8a65-07f7e4d6828a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1065.122380] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lock "7245e83c-2dda-4b2f-8a65-07f7e4d6828a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1065.182530] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Applying migration context for instance 726332dd-8699-49a4-a9ea-b9cbfc159855 as it has an incoming, in-progress migration 6995199b-b85e-43ee-9f2c-baf2244d91a8. Migration status is finished {{(pid=63345) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1065.182783] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Applying migration context for instance bce78147-6f6d-47a2-84f3-482f59a8bb8e as it has an incoming, in-progress migration 0057592c-761e-46cb-9854-74b439bd8605. Migration status is error {{(pid=63345) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1065.184078] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Skipping migration as instance is neither resizing nor live-migrating. {{(pid=63345) _update_usage_from_migrations /opt/stack/nova/nova/compute/resource_tracker.py:1563}} [ 1065.184244] env[63345]: INFO nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Updating resource usage from migration 6995199b-b85e-43ee-9f2c-baf2244d91a8 [ 1065.207800] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 869f8110-6490-4a47-955a-0ce085f826af actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1065.208055] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 22a11cf9-8f85-4371-98eb-25b267c9aff7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1065.208306] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1065.208521] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 95738bee-d291-4f27-aeff-9445939bb3fa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1065.208714] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 0f3f59b6-e7bc-4657-af5f-eec18efc3666 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1065.208903] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 95ef4f91-a618-4ae2-95ad-d027c031f239 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1065.209107] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 5cefe8a6-4af0-47d4-84f5-1d579d0c9968 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1065.209296] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Migration 6995199b-b85e-43ee-9f2c-baf2244d91a8 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1065.209500] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 726332dd-8699-49a4-a9ea-b9cbfc159855 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1065.209701] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance bce78147-6f6d-47a2-84f3-482f59a8bb8e actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1065.280874] env[63345]: DEBUG nova.network.neutron [-] [instance: 5cefe8a6-4af0-47d4-84f5-1d579d0c9968] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1065.613138] env[63345]: DEBUG nova.compute.manager [None req-0e2f79e0-206b-4041-87a3-fc45db2d4918 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Start destroying the instance on the hypervisor. {{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 1065.613507] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e2f79e0-206b-4041-87a3-fc45db2d4918 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1065.613850] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e236f985-3a7c-4a17-bf28-48e355a4dc4a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.625694] env[63345]: DEBUG nova.compute.manager [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 1065.641977] env[63345]: DEBUG oslo_vmware.api [None req-0e2f79e0-206b-4041-87a3-fc45db2d4918 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 1065.641977] env[63345]: value = "task-1017820" [ 1065.641977] env[63345]: _type = "Task" [ 1065.641977] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.655021] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e2f79e0-206b-4041-87a3-fc45db2d4918 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] VM already powered off {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 1065.655021] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e2f79e0-206b-4041-87a3-fc45db2d4918 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Volume detach. 
Driver type: vmdk {{(pid=63345) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1065.655021] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e2f79e0-206b-4041-87a3-fc45db2d4918 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-226146', 'volume_id': '20d77566-3841-4d48-8c1e-d94d3b3b3333', 'name': 'volume-20d77566-3841-4d48-8c1e-d94d3b3b3333', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'bce78147-6f6d-47a2-84f3-482f59a8bb8e', 'attached_at': '', 'detached_at': '', 'volume_id': '20d77566-3841-4d48-8c1e-d94d3b3b3333', 'serial': '20d77566-3841-4d48-8c1e-d94d3b3b3333'} {{(pid=63345) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1065.655021] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b11edea6-084f-4aff-90d0-4ae5e4595196 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.675110] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-823ce85e-acd4-4d11-a4ab-d73e7f648d08 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.684342] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cbe7767-91ce-4423-a940-e1ee93b1c2ca {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.705879] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b8e65d3-0cbd-4820-9644-1e0f61d59234 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.722235] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 7245e83c-2dda-4b2f-8a65-07f7e4d6828a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1065.722485] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=63345) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1065.722637] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2496MB phys_disk=200GB used_disk=9GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=63345) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1065.725683] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e2f79e0-206b-4041-87a3-fc45db2d4918 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] The volume has not been displaced from its original location: [datastore1] volume-20d77566-3841-4d48-8c1e-d94d3b3b3333/volume-20d77566-3841-4d48-8c1e-d94d3b3b3333.vmdk. No consolidation needed. {{(pid=63345) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1065.730894] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e2f79e0-206b-4041-87a3-fc45db2d4918 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Reconfiguring VM instance instance-0000006b to detach disk 2000 {{(pid=63345) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1065.731442] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8e8da01a-8f54-476b-84cf-1b4d0dee373c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.752469] env[63345]: DEBUG oslo_vmware.api [None req-0e2f79e0-206b-4041-87a3-fc45db2d4918 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 1065.752469] env[63345]: value = "task-1017821" [ 1065.752469] env[63345]: _type = "Task" [ 1065.752469] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.761753] env[63345]: DEBUG oslo_vmware.api [None req-0e2f79e0-206b-4041-87a3-fc45db2d4918 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017821, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.784806] env[63345]: INFO nova.compute.manager [-] [instance: 5cefe8a6-4af0-47d4-84f5-1d579d0c9968] Took 1.04 seconds to deallocate network for instance. 
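The "Final resource view" entry above reports used_ram=2496MB, used_disk=9GB and used_vcpus=10. Those totals can be re-derived from the per-instance and per-migration allocations listed by _remove_deleted_instances_allocations, assuming the 512MB reserved MEMORY_MB from the provider inventory is counted into used_ram (which is how the numbers reconcile). A quick tally with values copied from the log; this is only arithmetic, not Nova's resource-tracker code:

    # Allocations reported earlier for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57:
    # seven instances at 192MB/1GB/1VCPU (869f8110, 22a11cf9, 0a1ae505, 95738bee,
    # 0f3f59b6, 95ef4f91, 5cefe8a6), migration 6995199b at 192MB/1GB/1VCPU,
    # instance 726332dd at 256MB/1GB/1VCPU, and instance bce78147 at 192MB/1VCPU
    # with no DISK_GB in its placement allocation.
    allocations = (
        [{"MEMORY_MB": 192, "DISK_GB": 1, "VCPU": 1}] * 7
        + [{"MEMORY_MB": 192, "DISK_GB": 1, "VCPU": 1}]     # migration 6995199b
        + [{"MEMORY_MB": 256, "DISK_GB": 1, "VCPU": 1}]     # 726332dd
        + [{"MEMORY_MB": 192, "DISK_GB": 0, "VCPU": 1}]     # bce78147
    )

    reserved_ram_mb = 512   # 'reserved' in the MEMORY_MB inventory shown in the log

    used_ram = reserved_ram_mb + sum(a["MEMORY_MB"] for a in allocations)
    used_disk = sum(a["DISK_GB"] for a in allocations)
    used_vcpus = sum(a["VCPU"] for a in allocations)

    print(used_ram, used_disk, used_vcpus)   # 2496 9 10, matching the final resource view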
[ 1065.899579] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3fe945c-8e66-483e-8122-de8344a05eac {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.907819] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91a32ad1-9efb-4518-9935-d8f71bcf8f53 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.944508] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6243c249-3b94-4f2d-b8c4-b9a35c44b44b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.948722] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6a788111-27eb-4d7f-9dfc-c2ac09935a02 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1065.955854] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5403676-f0fd-44a2-bc97-8b155e986dfc {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.972667] env[63345]: DEBUG nova.compute.provider_tree [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1065.992978] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f8c4c6ce-9e24-4f82-bd8a-8bf7f9069f6b tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Acquiring lock "726332dd-8699-49a4-a9ea-b9cbfc159855" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1065.993280] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f8c4c6ce-9e24-4f82-bd8a-8bf7f9069f6b tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lock "726332dd-8699-49a4-a9ea-b9cbfc159855" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1065.993485] env[63345]: DEBUG nova.compute.manager [None req-f8c4c6ce-9e24-4f82-bd8a-8bf7f9069f6b tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Going to confirm migration 4 {{(pid=63345) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5112}} [ 1066.145387] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 
tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1066.262973] env[63345]: DEBUG oslo_vmware.api [None req-0e2f79e0-206b-4041-87a3-fc45db2d4918 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017821, 'name': ReconfigVM_Task, 'duration_secs': 0.231131} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.263282] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e2f79e0-206b-4041-87a3-fc45db2d4918 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Reconfigured VM instance instance-0000006b to detach disk 2000 {{(pid=63345) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1066.268312] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5ac012e7-c1bd-48f0-9838-5aad2870569a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.285933] env[63345]: DEBUG oslo_vmware.api [None req-0e2f79e0-206b-4041-87a3-fc45db2d4918 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 1066.285933] env[63345]: value = "task-1017822" [ 1066.285933] env[63345]: _type = "Task" [ 1066.285933] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.293868] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f72f8512-54fe-4406-a4ee-0321b88648d3 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1066.294208] env[63345]: DEBUG oslo_vmware.api [None req-0e2f79e0-206b-4041-87a3-fc45db2d4918 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017822, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.490547] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9c0392e3-9707-4d9f-8983-046bdb199563 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquiring lock "bce78147-6f6d-47a2-84f3-482f59a8bb8e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1066.496793] env[63345]: ERROR nova.scheduler.client.report [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [req-1ca18e40-ca7c-4272-b5b9-06f0de794250] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID fc35ddde-c15e-4ab8-bf77-a06ae0805b57. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-1ca18e40-ca7c-4272-b5b9-06f0de794250"}]} [ 1066.520374] env[63345]: DEBUG nova.scheduler.client.report [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Refreshing inventories for resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1066.541190] env[63345]: DEBUG nova.scheduler.client.report [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Updating ProviderTree inventory for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1066.541721] env[63345]: DEBUG nova.compute.provider_tree [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1066.556185] env[63345]: DEBUG nova.scheduler.client.report [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Refreshing aggregate associations for resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57, aggregates: None {{(pid=63345) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1066.577060] env[63345]: DEBUG nova.scheduler.client.report [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Refreshing trait associations for 
resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=63345) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1066.580298] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f8c4c6ce-9e24-4f82-bd8a-8bf7f9069f6b tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Acquiring lock "refresh_cache-726332dd-8699-49a4-a9ea-b9cbfc159855" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1066.580499] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f8c4c6ce-9e24-4f82-bd8a-8bf7f9069f6b tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Acquired lock "refresh_cache-726332dd-8699-49a4-a9ea-b9cbfc159855" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1066.580746] env[63345]: DEBUG nova.network.neutron [None req-f8c4c6ce-9e24-4f82-bd8a-8bf7f9069f6b tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1066.580905] env[63345]: DEBUG nova.objects.instance [None req-f8c4c6ce-9e24-4f82-bd8a-8bf7f9069f6b tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lazy-loading 'info_cache' on Instance uuid 726332dd-8699-49a4-a9ea-b9cbfc159855 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1066.799753] env[63345]: DEBUG oslo_vmware.api [None req-0e2f79e0-206b-4041-87a3-fc45db2d4918 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017822, 'name': ReconfigVM_Task, 'duration_secs': 0.112557} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.800574] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e2f79e0-206b-4041-87a3-fc45db2d4918 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-226146', 'volume_id': '20d77566-3841-4d48-8c1e-d94d3b3b3333', 'name': 'volume-20d77566-3841-4d48-8c1e-d94d3b3b3333', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'bce78147-6f6d-47a2-84f3-482f59a8bb8e', 'attached_at': '', 'detached_at': '', 'volume_id': '20d77566-3841-4d48-8c1e-d94d3b3b3333', 'serial': '20d77566-3841-4d48-8c1e-d94d3b3b3333'} {{(pid=63345) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1066.800930] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-0e2f79e0-206b-4041-87a3-fc45db2d4918 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1066.801756] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f16cf5d-c26c-4fa8-8f32-46221f80c71b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.810331] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-0e2f79e0-206b-4041-87a3-fc45db2d4918 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1066.811328] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e1537189-67ec-4c9e-8497-a0de07d0aeda {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.848154] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75e719be-d84a-40d1-b1bf-d30179a5e04e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.856743] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e10ebb0-79aa-4c9e-9d8d-22a54b315d2f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.894681] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d16693f-e7c3-4b00-959c-6e807557e3f0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.897966] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-0e2f79e0-206b-4041-87a3-fc45db2d4918 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1066.898199] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-0e2f79e0-206b-4041-87a3-fc45db2d4918 tempest-ServerActionsTestOtherA-316720793 
tempest-ServerActionsTestOtherA-316720793-project-member] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Deleting contents of the VM from datastore datastore1 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1066.898385] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e2f79e0-206b-4041-87a3-fc45db2d4918 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Deleting the datastore file [datastore1] bce78147-6f6d-47a2-84f3-482f59a8bb8e {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1066.898625] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-601c6d8e-4969-4c4a-9c45-6e63325644d4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.906735] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae85b92d-e643-4353-b1a2-6bc4b50a9a52 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.911199] env[63345]: DEBUG oslo_vmware.api [None req-0e2f79e0-206b-4041-87a3-fc45db2d4918 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 1066.911199] env[63345]: value = "task-1017824" [ 1066.911199] env[63345]: _type = "Task" [ 1066.911199] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.923265] env[63345]: DEBUG nova.compute.provider_tree [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1066.932754] env[63345]: DEBUG oslo_vmware.api [None req-0e2f79e0-206b-4041-87a3-fc45db2d4918 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017824, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.424050] env[63345]: DEBUG oslo_vmware.api [None req-0e2f79e0-206b-4041-87a3-fc45db2d4918 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017824, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.074329} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.424428] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e2f79e0-206b-4041-87a3-fc45db2d4918 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1067.425178] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-0e2f79e0-206b-4041-87a3-fc45db2d4918 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Deleted contents of the VM from datastore datastore1 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1067.425520] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-0e2f79e0-206b-4041-87a3-fc45db2d4918 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1067.426180] env[63345]: INFO nova.compute.manager [None req-0e2f79e0-206b-4041-87a3-fc45db2d4918 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Took 1.81 seconds to destroy the instance on the hypervisor. [ 1067.426430] env[63345]: DEBUG oslo.service.loopingcall [None req-0e2f79e0-206b-4041-87a3-fc45db2d4918 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1067.429126] env[63345]: DEBUG nova.compute.manager [-] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 1067.429390] env[63345]: DEBUG nova.network.neutron [-] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1067.469089] env[63345]: DEBUG nova.scheduler.client.report [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Updated inventory for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with generation 153 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1067.469485] env[63345]: DEBUG nova.compute.provider_tree [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Updating resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 generation from 153 to 154 during operation: update_inventory {{(pid=63345) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1067.469893] env[63345]: DEBUG nova.compute.provider_tree [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1067.873021] env[63345]: DEBUG oslo_concurrency.lockutils [None req-905f62e0-c0fa-4c9f-ad5f-1a50c8c86d04 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquiring lock "0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1067.873021] env[63345]: DEBUG oslo_concurrency.lockutils [None req-905f62e0-c0fa-4c9f-ad5f-1a50c8c86d04 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lock "0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1067.873021] env[63345]: DEBUG oslo_concurrency.lockutils [None req-905f62e0-c0fa-4c9f-ad5f-1a50c8c86d04 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquiring lock "0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1067.873021] env[63345]: DEBUG oslo_concurrency.lockutils [None req-905f62e0-c0fa-4c9f-ad5f-1a50c8c86d04 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lock "0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1067.873021] env[63345]: DEBUG oslo_concurrency.lockutils [None req-905f62e0-c0fa-4c9f-ad5f-1a50c8c86d04 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lock "0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1067.874098] env[63345]: INFO nova.compute.manager [None req-905f62e0-c0fa-4c9f-ad5f-1a50c8c86d04 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a] Terminating instance [ 1067.942961] env[63345]: DEBUG nova.network.neutron [None req-f8c4c6ce-9e24-4f82-bd8a-8bf7f9069f6b tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Updating instance_info_cache with network_info: [{"id": "114e38e0-a558-4242-ad5b-4aac063dcb72", "address": "fa:16:3e:bb:2c:f8", "network": {"id": "dffa0b34-9323-42eb-aeb1-e32aebcb75c8", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1826417035-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.227", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "57e386920081487583ea143003aca8c4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "94e1d797-8eb2-4400-9f7d-f2eb60eb4cf2", "external-id": "nsx-vlan-transportzone-828", "segmentation_id": 828, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap114e38e0-a5", "ovs_interfaceid": "114e38e0-a558-4242-ad5b-4aac063dcb72", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1067.978741] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63345) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1067.978741] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.812s {{(pid=63345) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1067.978741] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7830a925-19f9-4416-927e-517b7b3aa6d4 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.169s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1067.978741] env[63345]: DEBUG nova.objects.instance [None req-7830a925-19f9-4416-927e-517b7b3aa6d4 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Lazy-loading 'resources' on Instance uuid 0f3f59b6-e7bc-4657-af5f-eec18efc3666 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1067.978741] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1067.978741] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Cleaning up deleted instances {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11557}} [ 1068.157841] env[63345]: DEBUG nova.compute.manager [req-d8845113-6c00-4b33-8071-d30feb39cedb req-7df10dec-43e0-48e2-871c-b928e5948f3b service nova] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Received event network-vif-deleted-4829f314-cace-49cc-b77a-016ee4b1c7e6 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1068.158068] env[63345]: INFO nova.compute.manager [req-d8845113-6c00-4b33-8071-d30feb39cedb req-7df10dec-43e0-48e2-871c-b928e5948f3b service nova] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Neutron deleted interface 4829f314-cace-49cc-b77a-016ee4b1c7e6; detaching it from the instance and deleting it from the info cache [ 1068.158258] env[63345]: DEBUG nova.network.neutron [req-d8845113-6c00-4b33-8071-d30feb39cedb req-7df10dec-43e0-48e2-871c-b928e5948f3b service nova] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1068.377703] env[63345]: DEBUG nova.compute.manager [None req-905f62e0-c0fa-4c9f-ad5f-1a50c8c86d04 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a] Start destroying the instance on the hypervisor. 
{{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 1068.377954] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-905f62e0-c0fa-4c9f-ad5f-1a50c8c86d04 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1068.378872] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb15836a-c349-4364-b3cd-8fc14bfffa44 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.387409] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-905f62e0-c0fa-4c9f-ad5f-1a50c8c86d04 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1068.387663] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-297332c8-c647-42ce-8813-d94988449546 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.396384] env[63345]: DEBUG oslo_vmware.api [None req-905f62e0-c0fa-4c9f-ad5f-1a50c8c86d04 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 1068.396384] env[63345]: value = "task-1017825" [ 1068.396384] env[63345]: _type = "Task" [ 1068.396384] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.405348] env[63345]: DEBUG oslo_vmware.api [None req-905f62e0-c0fa-4c9f-ad5f-1a50c8c86d04 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017825, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.450336] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f8c4c6ce-9e24-4f82-bd8a-8bf7f9069f6b tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Releasing lock "refresh_cache-726332dd-8699-49a4-a9ea-b9cbfc159855" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1068.450876] env[63345]: DEBUG nova.objects.instance [None req-f8c4c6ce-9e24-4f82-bd8a-8bf7f9069f6b tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lazy-loading 'migration_context' on Instance uuid 726332dd-8699-49a4-a9ea-b9cbfc159855 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1068.453583] env[63345]: DEBUG nova.network.neutron [-] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1068.498335] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] There are 58 instances to clean {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11566}} [ 1068.498335] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: b3e0831b-b8f1-40c4-be01-71ed6484dbc0] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1068.663542] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c7d4037-9ffe-4e4c-a997-13114e733f19 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.670041] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-223ef787-9786-4154-9990-93a5fdd4570f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.683782] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5ab812d-5991-4baa-8953-6d40b56cca59 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.692953] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29e534d2-1f53-4fcc-bcdc-e6e9b8fd600c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.737592] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3523a646-c1f7-4a1c-9337-0fc1909003cf {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.740543] env[63345]: DEBUG nova.compute.manager [req-d8845113-6c00-4b33-8071-d30feb39cedb req-7df10dec-43e0-48e2-871c-b928e5948f3b service nova] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Detach interface failed, port_id=4829f314-cace-49cc-b77a-016ee4b1c7e6, reason: Instance bce78147-6f6d-47a2-84f3-482f59a8bb8e could not be found. 
{{(pid=63345) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 1068.747743] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54be0127-4208-4fe8-bdf7-2ce864e5a225 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.763716] env[63345]: DEBUG oslo_concurrency.lockutils [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Acquiring lock "148c961e-d260-4dbd-ad9f-52f94b072096" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1068.763978] env[63345]: DEBUG oslo_concurrency.lockutils [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Lock "148c961e-d260-4dbd-ad9f-52f94b072096" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1068.765930] env[63345]: DEBUG nova.compute.provider_tree [None req-7830a925-19f9-4416-927e-517b7b3aa6d4 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1068.906970] env[63345]: DEBUG oslo_vmware.api [None req-905f62e0-c0fa-4c9f-ad5f-1a50c8c86d04 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017825, 'name': PowerOffVM_Task, 'duration_secs': 0.208304} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.907283] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-905f62e0-c0fa-4c9f-ad5f-1a50c8c86d04 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1068.907462] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-905f62e0-c0fa-4c9f-ad5f-1a50c8c86d04 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1068.907731] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cc561d4a-053d-439d-aa1a-a6d2fc8f4de1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.958907] env[63345]: DEBUG nova.objects.base [None req-f8c4c6ce-9e24-4f82-bd8a-8bf7f9069f6b tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Object Instance<726332dd-8699-49a4-a9ea-b9cbfc159855> lazy-loaded attributes: info_cache,migration_context {{(pid=63345) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 1068.958907] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80d97d50-bb2d-4517-9329-912b2eed1b14 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.964034] env[63345]: INFO nova.compute.manager [-] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Took 1.53 seconds to deallocate network for instance. 
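The teardown records for instances bce78147-6f6d-47a2-84f3-482f59a8bb8e and 0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a follow the same order: power off the VM, unregister it, delete its datastore directory, then deallocate networking. A rough outline of that sequence, with stub objects and hypothetical helper names standing in for the driver and Neutron calls; this only illustrates the order of operations visible in the log, not Nova's actual implementation:

class _Stub:
    """Minimal stand-in so the sketch can be run without a vCenter."""
    def __init__(self, name):
        self._name = name
    def __getattr__(self, op):
        def _call(*args, **kwargs):
            print('%s.%s%s' % (self._name, op, args))
        return _call

def destroy_instance(vm, ds_util, network_api, instance_uuid):
    # Order mirrors the records above:
    # PowerOffVM_Task -> UnregisterVM -> DeleteDatastoreFile_Task -> deallocate_for_instance
    vm.power_off()
    vm.unregister()
    ds_util.delete_datastore_dir(instance_uuid)
    network_api.deallocate_for_instance(instance_uuid)

destroy_instance(_Stub('vm'), _Stub('ds_util'), _Stub('neutron'),
                 'bce78147-6f6d-47a2-84f3-482f59a8bb8e')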
[ 1068.983014] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c73c383-c6d6-4732-992a-0ef6b9203fe8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.985588] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-905f62e0-c0fa-4c9f-ad5f-1a50c8c86d04 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1068.985784] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-905f62e0-c0fa-4c9f-ad5f-1a50c8c86d04 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1068.985967] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-905f62e0-c0fa-4c9f-ad5f-1a50c8c86d04 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Deleting the datastore file [datastore2] 0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1068.986233] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-01663466-8087-483a-a3c1-488076987ac9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.991303] env[63345]: DEBUG oslo_vmware.api [None req-f8c4c6ce-9e24-4f82-bd8a-8bf7f9069f6b tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 1068.991303] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]520ffacd-d7d1-f759-1e8b-d99ff5cf3c5a" [ 1068.991303] env[63345]: _type = "Task" [ 1068.991303] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.996721] env[63345]: DEBUG oslo_vmware.api [None req-905f62e0-c0fa-4c9f-ad5f-1a50c8c86d04 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 1068.996721] env[63345]: value = "task-1017827" [ 1068.996721] env[63345]: _type = "Task" [ 1068.996721] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1069.004092] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: da3408a0-cce7-4252-be47-097f081d83c1] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1069.006273] env[63345]: DEBUG oslo_vmware.api [None req-f8c4c6ce-9e24-4f82-bd8a-8bf7f9069f6b tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]520ffacd-d7d1-f759-1e8b-d99ff5cf3c5a, 'name': SearchDatastore_Task, 'duration_secs': 0.007172} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.007033] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f8c4c6ce-9e24-4f82-bd8a-8bf7f9069f6b tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1069.011897] env[63345]: DEBUG oslo_vmware.api [None req-905f62e0-c0fa-4c9f-ad5f-1a50c8c86d04 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017827, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.109532] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Acquiring lock "83ef21e9-62eb-4f0d-9c0c-a038743e0dd8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1069.109850] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Lock "83ef21e9-62eb-4f0d-9c0c-a038743e0dd8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1069.270020] env[63345]: DEBUG nova.compute.manager [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] Starting instance... 
{{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 1069.270461] env[63345]: DEBUG nova.scheduler.client.report [None req-7830a925-19f9-4416-927e-517b7b3aa6d4 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1069.508123] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: a7d80763-92f0-45a9-b24b-1f973bffb376] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1069.510048] env[63345]: DEBUG oslo_vmware.api [None req-905f62e0-c0fa-4c9f-ad5f-1a50c8c86d04 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017827, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.136223} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.511351] env[63345]: INFO nova.compute.manager [None req-0e2f79e0-206b-4041-87a3-fc45db2d4918 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Took 0.55 seconds to detach 1 volumes for instance. [ 1069.513137] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-905f62e0-c0fa-4c9f-ad5f-1a50c8c86d04 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1069.513441] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-905f62e0-c0fa-4c9f-ad5f-1a50c8c86d04 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1069.513704] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-905f62e0-c0fa-4c9f-ad5f-1a50c8c86d04 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1069.513897] env[63345]: INFO nova.compute.manager [None req-905f62e0-c0fa-4c9f-ad5f-1a50c8c86d04 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1069.514228] env[63345]: DEBUG oslo.service.loopingcall [None req-905f62e0-c0fa-4c9f-ad5f-1a50c8c86d04 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1069.514820] env[63345]: DEBUG nova.compute.manager [None req-0e2f79e0-206b-4041-87a3-fc45db2d4918 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Deleting volume: 20d77566-3841-4d48-8c1e-d94d3b3b3333 {{(pid=63345) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3281}} [ 1069.517018] env[63345]: DEBUG nova.compute.manager [-] [instance: 0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 1069.517018] env[63345]: DEBUG nova.network.neutron [-] [instance: 0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1069.612855] env[63345]: DEBUG nova.compute.manager [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 1069.778700] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7830a925-19f9-4416-927e-517b7b3aa6d4 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.800s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1069.782564] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6a788111-27eb-4d7f-9dfc-c2ac09935a02 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.834s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1069.803222] env[63345]: INFO nova.scheduler.client.report [None req-7830a925-19f9-4416-927e-517b7b3aa6d4 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Deleted allocations for instance 0f3f59b6-e7bc-4657-af5f-eec18efc3666 [ 1069.808976] env[63345]: DEBUG oslo_concurrency.lockutils [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1069.939679] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-603f1de1-5fa2-446b-a121-c797e5e87dab {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.949648] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-980609d3-712e-49a7-b25b-bf4f25f5b23c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.980504] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6d67a46-062b-495a-aae5-41bbe659e974 
{{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.989868] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36be75ad-6595-4124-868c-856bc046bf71 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.005460] env[63345]: DEBUG nova.compute.provider_tree [None req-6a788111-27eb-4d7f-9dfc-c2ac09935a02 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1070.015108] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 34990fa5-4a89-4430-8ea7-9e73dd41f441] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1070.057960] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0e2f79e0-206b-4041-87a3-fc45db2d4918 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1070.130729] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1070.184508] env[63345]: DEBUG nova.compute.manager [req-d8c12fdc-5a75-462d-82dd-4299e6a45cfd req-610562c4-d02f-4d7d-911f-1da14b9a64a2 service nova] [instance: 0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a] Received event network-vif-deleted-bca55223-b7b4-4623-abaf-4d4a68f5b7cc {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1070.184726] env[63345]: INFO nova.compute.manager [req-d8c12fdc-5a75-462d-82dd-4299e6a45cfd req-610562c4-d02f-4d7d-911f-1da14b9a64a2 service nova] [instance: 0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a] Neutron deleted interface bca55223-b7b4-4623-abaf-4d4a68f5b7cc; detaching it from the instance and deleting it from the info cache [ 1070.184972] env[63345]: DEBUG nova.network.neutron [req-d8c12fdc-5a75-462d-82dd-4299e6a45cfd req-610562c4-d02f-4d7d-911f-1da14b9a64a2 service nova] [instance: 0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1070.258033] env[63345]: DEBUG nova.network.neutron [-] [instance: 0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1070.312632] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7830a925-19f9-4416-927e-517b7b3aa6d4 tempest-ServerShowV247Test-1493692582 tempest-ServerShowV247Test-1493692582-project-member] Lock "0f3f59b6-e7bc-4657-af5f-eec18efc3666" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.696s {{(pid=63345) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1070.508782] env[63345]: DEBUG nova.scheduler.client.report [None req-6a788111-27eb-4d7f-9dfc-c2ac09935a02 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1070.517667] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 7057cdfc-a6d9-4e52-b650-6a5709d5f8c2] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1070.570324] env[63345]: DEBUG oslo_concurrency.lockutils [None req-41b12f35-78a9-4c58-a5c8-19ce66b650b9 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Acquiring lock "22a11cf9-8f85-4371-98eb-25b267c9aff7" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1070.570634] env[63345]: DEBUG oslo_concurrency.lockutils [None req-41b12f35-78a9-4c58-a5c8-19ce66b650b9 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Lock "22a11cf9-8f85-4371-98eb-25b267c9aff7" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1070.687402] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d63d91d7-33b6-405f-88e6-f9cd914b3691 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.698242] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b44adb4f-22c7-4252-abbe-6523422f16a2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.729074] env[63345]: DEBUG nova.compute.manager [req-d8c12fdc-5a75-462d-82dd-4299e6a45cfd req-610562c4-d02f-4d7d-911f-1da14b9a64a2 service nova] [instance: 0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a] Detach interface failed, port_id=bca55223-b7b4-4623-abaf-4d4a68f5b7cc, reason: Instance 0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a could not be found. {{(pid=63345) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 1070.759870] env[63345]: INFO nova.compute.manager [-] [instance: 0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a] Took 1.24 seconds to deallocate network for instance. 
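Earlier in this window the report client hit a 409 placement.concurrent_update while PUTting inventory for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 (req-1ca18e40-ca7c-4272-b5b9-06f0de794250), refreshed its cached view, and then landed the update, bumping the provider generation from 153 to 154; the later "Inventory has not changed" records are the cheap path where the cached inventory already matches. The retry shape is optimistic concurrency keyed on the provider generation. A condensed sketch of that loop, with get_inventory()/put_inventory() as hypothetical stand-ins for the Placement API calls rather than the report client's real methods:

class ConcurrentUpdate(Exception):
    """Stand-in for a 409 placement.concurrent_update response."""

def set_inventory(get_inventory, put_inventory, desired, max_retries=4):
    # Optimistic-concurrency loop: read the provider generation, try to write
    # against it, and on a generation conflict re-read and try again.
    for _ in range(max_retries):
        generation, current = get_inventory()   # hypothetical: (153, {...current inventory...})
        if current == desired:
            return generation                   # the "Inventory has not changed" case
        try:
            return put_inventory(generation, desired)  # returns the new generation on success
        except ConcurrentUpdate:
            continue                            # another writer moved the generation; refresh
    raise RuntimeError('gave up updating inventory after %d attempts' % max_retries)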
[ 1071.014048] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6a788111-27eb-4d7f-9dfc-c2ac09935a02 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.231s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1071.014341] env[63345]: INFO nova.compute.manager [None req-6a788111-27eb-4d7f-9dfc-c2ac09935a02 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Successfully reverted task state from resize_migrating on failure for instance. [ 1071.021943] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.877s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1071.023527] env[63345]: INFO nova.compute.claims [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1071.025974] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 3d1e47c5-7e8c-417c-8c7c-009db666d391] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1071.028315] env[63345]: ERROR oslo_messaging.rpc.server [None req-6a788111-27eb-4d7f-9dfc-c2ac09935a02 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Exception during message handling: AttributeError: 'NoneType' object has no attribute 'key' [ 1071.028315] env[63345]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1071.028315] env[63345]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 172, in _process_incoming [ 1071.028315] env[63345]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1071.028315] env[63345]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1071.028315] env[63345]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1071.028315] env[63345]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1071.028315] env[63345]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1071.028315] env[63345]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1071.028315] env[63345]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1071.028315] env[63345]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1071.028315] env[63345]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1071.028315] env[63345]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1071.028315] env[63345]: ERROR oslo_messaging.rpc.server raise self.value [ 1071.028315] env[63345]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1071.028315] env[63345]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1071.028315] env[63345]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 168, in decorated_function [ 1071.028315] env[63345]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1071.028315] env[63345]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1071.028315] env[63345]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1071.028315] env[63345]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1071.028315] env[63345]: ERROR oslo_messaging.rpc.server raise self.value [ 1071.028315] env[63345]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 159, in decorated_function [ 1071.028315] env[63345]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1071.028315] env[63345]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 1071.028315] env[63345]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1071.028315] env[63345]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 215, in decorated_function [ 1071.028315] env[63345]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1071.028315] env[63345]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1071.028315] env[63345]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1071.028315] env[63345]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1071.028315] env[63345]: ERROR oslo_messaging.rpc.server raise self.value [ 1071.028315] env[63345]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 205, in decorated_function [ 1071.028315] env[63345]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1071.028315] env[63345]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 6400, in resize_instance [ 1071.028315] env[63345]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1071.028315] env[63345]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1071.028315] env[63345]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1071.028315] env[63345]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1071.028315] env[63345]: ERROR oslo_messaging.rpc.server raise self.value [ 1071.028315] env[63345]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 6397, in resize_instance [ 1071.028315] env[63345]: ERROR oslo_messaging.rpc.server self._resize_instance(context, 
instance, image, migration, [ 1071.028315] env[63345]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 6434, in _resize_instance [ 1071.028315] env[63345]: ERROR oslo_messaging.rpc.server disk_info = self.driver.migrate_disk_and_power_off( [ 1071.028315] env[63345]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 266, in migrate_disk_and_power_off [ 1071.028315] env[63345]: ERROR oslo_messaging.rpc.server return self._vmops.migrate_disk_and_power_off(context, instance, [ 1071.028315] env[63345]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 1467, in migrate_disk_and_power_off [ 1071.028315] env[63345]: ERROR oslo_messaging.rpc.server self._resize_disk(instance, vm_ref, vmdk, flavor) [ 1071.028315] env[63345]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 1398, in _resize_disk [ 1071.028315] env[63345]: ERROR oslo_messaging.rpc.server self._volumeops.detach_disk_from_vm(vm_ref, instance, vmdk.device) [ 1071.028315] env[63345]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 121, in detach_disk_from_vm [ 1071.028315] env[63345]: ERROR oslo_messaging.rpc.server disk_key = device.key [ 1071.028315] env[63345]: ERROR oslo_messaging.rpc.server AttributeError: 'NoneType' object has no attribute 'key' [ 1071.030481] env[63345]: ERROR oslo_messaging.rpc.server [ 1071.073504] env[63345]: INFO nova.compute.manager [None req-41b12f35-78a9-4c58-a5c8-19ce66b650b9 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Detaching volume 87389bc2-cacc-4afc-ae89-1315868453a6 [ 1071.111853] env[63345]: INFO nova.virt.block_device [None req-41b12f35-78a9-4c58-a5c8-19ce66b650b9 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Attempting to driver detach volume 87389bc2-cacc-4afc-ae89-1315868453a6 from mountpoint /dev/sdb [ 1071.112129] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-41b12f35-78a9-4c58-a5c8-19ce66b650b9 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Volume detach. 
Driver type: vmdk {{(pid=63345) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1071.112343] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-41b12f35-78a9-4c58-a5c8-19ce66b650b9 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-226148', 'volume_id': '87389bc2-cacc-4afc-ae89-1315868453a6', 'name': 'volume-87389bc2-cacc-4afc-ae89-1315868453a6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '22a11cf9-8f85-4371-98eb-25b267c9aff7', 'attached_at': '', 'detached_at': '', 'volume_id': '87389bc2-cacc-4afc-ae89-1315868453a6', 'serial': '87389bc2-cacc-4afc-ae89-1315868453a6'} {{(pid=63345) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1071.113301] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c42a8c02-0faf-401f-808d-2dc9986e63f2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.136957] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c4acc76-80f3-44c9-9470-7fc7d58b1d26 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.145142] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-811119fe-cea7-416f-b60c-92f027a05064 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.167700] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e958006-2824-45b7-abff-922239201268 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.183450] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-41b12f35-78a9-4c58-a5c8-19ce66b650b9 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] The volume has not been displaced from its original location: [datastore1] volume-87389bc2-cacc-4afc-ae89-1315868453a6/volume-87389bc2-cacc-4afc-ae89-1315868453a6.vmdk. No consolidation needed. 
{{(pid=63345) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1071.188769] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-41b12f35-78a9-4c58-a5c8-19ce66b650b9 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Reconfiguring VM instance instance-0000005e to detach disk 2001 {{(pid=63345) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1071.189106] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1bb2a8f4-b9a5-4d3b-b8ae-025f324086d3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.208392] env[63345]: DEBUG oslo_vmware.api [None req-41b12f35-78a9-4c58-a5c8-19ce66b650b9 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Waiting for the task: (returnval){ [ 1071.208392] env[63345]: value = "task-1017829" [ 1071.208392] env[63345]: _type = "Task" [ 1071.208392] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.216574] env[63345]: DEBUG oslo_vmware.api [None req-41b12f35-78a9-4c58-a5c8-19ce66b650b9 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': task-1017829, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.266307] env[63345]: DEBUG oslo_concurrency.lockutils [None req-905f62e0-c0fa-4c9f-ad5f-1a50c8c86d04 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1071.531936] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: a8321259-b3a6-4e87-b13a-b964cf0dd766] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1071.719128] env[63345]: DEBUG oslo_vmware.api [None req-41b12f35-78a9-4c58-a5c8-19ce66b650b9 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': task-1017829, 'name': ReconfigVM_Task, 'duration_secs': 0.249938} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1071.719410] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-41b12f35-78a9-4c58-a5c8-19ce66b650b9 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Reconfigured VM instance instance-0000005e to detach disk 2001 {{(pid=63345) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1071.724789] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c7749e3a-51c5-4b98-b5b8-6e43374f0af4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.741830] env[63345]: DEBUG oslo_vmware.api [None req-41b12f35-78a9-4c58-a5c8-19ce66b650b9 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Waiting for the task: (returnval){ [ 1071.741830] env[63345]: value = "task-1017830" [ 1071.741830] env[63345]: _type = "Task" [ 1071.741830] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.750457] env[63345]: DEBUG oslo_vmware.api [None req-41b12f35-78a9-4c58-a5c8-19ce66b650b9 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': task-1017830, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.038099] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 3d644f16-7924-4545-a528-1499a702d614] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1072.183148] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04b69ba9-ee87-4895-9727-b6e39355461e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.191227] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ddce108-01be-4c1f-af68-11dfb3ae380a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.223624] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d2dac9c-4c60-4aa7-a9fd-1fcd65a5a787 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.231984] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36895a48-0b0f-4a58-8495-1a91688577b1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.245914] env[63345]: DEBUG nova.compute.provider_tree [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1072.257874] env[63345]: DEBUG oslo_vmware.api [None req-41b12f35-78a9-4c58-a5c8-19ce66b650b9 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': 
task-1017830, 'name': ReconfigVM_Task, 'duration_secs': 0.142825} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.258194] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-41b12f35-78a9-4c58-a5c8-19ce66b650b9 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-226148', 'volume_id': '87389bc2-cacc-4afc-ae89-1315868453a6', 'name': 'volume-87389bc2-cacc-4afc-ae89-1315868453a6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '22a11cf9-8f85-4371-98eb-25b267c9aff7', 'attached_at': '', 'detached_at': '', 'volume_id': '87389bc2-cacc-4afc-ae89-1315868453a6', 'serial': '87389bc2-cacc-4afc-ae89-1315868453a6'} {{(pid=63345) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1072.541377] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: dd624e54-bd5b-4660-88a1-9d6f36560421] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1072.751704] env[63345]: DEBUG nova.scheduler.client.report [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1072.804355] env[63345]: DEBUG nova.objects.instance [None req-41b12f35-78a9-4c58-a5c8-19ce66b650b9 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Lazy-loading 'flavor' on Instance uuid 22a11cf9-8f85-4371-98eb-25b267c9aff7 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1073.044344] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: b3f20003-f75d-4d9f-bb4a-02d2930054a8] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1073.256882] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.235s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1073.257479] env[63345]: DEBUG nova.compute.manager [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Start building networks asynchronously for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 1073.259858] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f72f8512-54fe-4406-a4ee-0321b88648d3 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.966s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1073.260095] env[63345]: DEBUG nova.objects.instance [None req-f72f8512-54fe-4406-a4ee-0321b88648d3 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Lazy-loading 'resources' on Instance uuid 5cefe8a6-4af0-47d4-84f5-1d579d0c9968 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1073.548017] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: a0eb9dae-0d27-419f-9210-eaa445e564c8] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1073.742868] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1da5df5e-6301-4d15-8482-22f93cdc6fca tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Acquiring lock "22a11cf9-8f85-4371-98eb-25b267c9aff7" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1073.766085] env[63345]: DEBUG nova.compute.utils [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1073.767512] env[63345]: DEBUG nova.compute.manager [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Allocating IP information in the background. 
{{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1073.767680] env[63345]: DEBUG nova.network.neutron [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1073.808542] env[63345]: DEBUG nova.policy [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fb6730bb6292421e8f943bce2e912bef', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c535ae9067ab4e8a87e95c68af4624fb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 1073.813913] env[63345]: DEBUG oslo_concurrency.lockutils [None req-41b12f35-78a9-4c58-a5c8-19ce66b650b9 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Lock "22a11cf9-8f85-4371-98eb-25b267c9aff7" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.243s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1073.815120] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1da5df5e-6301-4d15-8482-22f93cdc6fca tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Lock "22a11cf9-8f85-4371-98eb-25b267c9aff7" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.072s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1073.815338] env[63345]: DEBUG nova.compute.manager [None req-1da5df5e-6301-4d15-8482-22f93cdc6fca tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1073.816426] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a3546af-a2a7-46cc-9543-af56631b5205 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.826753] env[63345]: DEBUG nova.compute.manager [None req-1da5df5e-6301-4d15-8482-22f93cdc6fca tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63345) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3403}} [ 1073.827315] env[63345]: DEBUG nova.objects.instance [None req-1da5df5e-6301-4d15-8482-22f93cdc6fca tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Lazy-loading 'flavor' on Instance uuid 22a11cf9-8f85-4371-98eb-25b267c9aff7 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1074.051660] env[63345]: DEBUG nova.compute.manager [None 
req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 49cf9c08-4024-40aa-9370-7b4f8d89e2cf] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1074.074323] env[63345]: DEBUG nova.network.neutron [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Successfully created port: 5709e20d-8dfc-41ae-981d-01de437144e3 {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1074.162624] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-018b9c10-753d-4b64-b2c0-ef6f7e425730 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.170193] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4493c3a3-416d-4e41-a723-f191f9dd2bb7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.199334] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd2d4ee1-021e-4d72-ba26-a4f423d0f718 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.206832] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62fa01ac-a2a0-4446-a61e-f40e0170f4a9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.221235] env[63345]: DEBUG nova.compute.provider_tree [None req-f72f8512-54fe-4406-a4ee-0321b88648d3 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1074.270416] env[63345]: DEBUG nova.compute.manager [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Start building block device mappings for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 1074.555676] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 0fe61754-458c-4c5c-bb2d-2677302e5fb9] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1074.725208] env[63345]: DEBUG nova.scheduler.client.report [None req-f72f8512-54fe-4406-a4ee-0321b88648d3 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1074.834018] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-1da5df5e-6301-4d15-8482-22f93cdc6fca tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1074.834150] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3e0d3ffc-3449-451a-ab82-6d23aaa581e6 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.843864] env[63345]: DEBUG oslo_vmware.api [None req-1da5df5e-6301-4d15-8482-22f93cdc6fca tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Waiting for the task: (returnval){ [ 1074.843864] env[63345]: value = "task-1017831" [ 1074.843864] env[63345]: _type = "Task" [ 1074.843864] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.852157] env[63345]: DEBUG oslo_vmware.api [None req-1da5df5e-6301-4d15-8482-22f93cdc6fca tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': task-1017831, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.059220] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: e5546a26-3f94-48a6-914a-2c37e63a0aeb] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1075.230627] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f72f8512-54fe-4406-a4ee-0321b88648d3 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.970s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1075.233220] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f8c4c6ce-9e24-4f82-bd8a-8bf7f9069f6b tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 6.226s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1075.254970] env[63345]: INFO nova.scheduler.client.report [None req-f72f8512-54fe-4406-a4ee-0321b88648d3 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Deleted allocations for instance 5cefe8a6-4af0-47d4-84f5-1d579d0c9968 [ 1075.278153] env[63345]: DEBUG nova.compute.manager [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Start spawning the instance on the hypervisor. 
{{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 1075.307850] env[63345]: DEBUG nova.virt.hardware [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1075.308297] env[63345]: DEBUG nova.virt.hardware [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1075.308605] env[63345]: DEBUG nova.virt.hardware [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1075.308953] env[63345]: DEBUG nova.virt.hardware [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1075.309250] env[63345]: DEBUG nova.virt.hardware [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1075.309539] env[63345]: DEBUG nova.virt.hardware [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1075.309923] env[63345]: DEBUG nova.virt.hardware [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1075.310257] env[63345]: DEBUG nova.virt.hardware [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1075.310597] env[63345]: DEBUG nova.virt.hardware [None 
req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1075.310920] env[63345]: DEBUG nova.virt.hardware [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1075.311426] env[63345]: DEBUG nova.virt.hardware [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1075.313050] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d88de692-9c3d-45af-ba7d-3ced2a20e21e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.325267] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd850cc6-5f66-403e-ba7f-dae769ac1359 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.360160] env[63345]: DEBUG oslo_vmware.api [None req-1da5df5e-6301-4d15-8482-22f93cdc6fca tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': task-1017831, 'name': PowerOffVM_Task, 'duration_secs': 0.363285} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.360563] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-1da5df5e-6301-4d15-8482-22f93cdc6fca tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1075.360893] env[63345]: DEBUG nova.compute.manager [None req-1da5df5e-6301-4d15-8482-22f93cdc6fca tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1075.362128] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-648c32d8-3251-4b29-84cc-7eb8507a8c14 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.464442] env[63345]: DEBUG nova.compute.manager [req-a3ee184b-1074-4ee2-a66b-066a093089f6 req-5bef1a93-b10d-45f0-8498-4e25fda27fdc service nova] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Received event network-vif-plugged-5709e20d-8dfc-41ae-981d-01de437144e3 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1075.464696] env[63345]: DEBUG oslo_concurrency.lockutils [req-a3ee184b-1074-4ee2-a66b-066a093089f6 req-5bef1a93-b10d-45f0-8498-4e25fda27fdc service nova] Acquiring lock "7245e83c-2dda-4b2f-8a65-07f7e4d6828a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1075.464918] env[63345]: DEBUG oslo_concurrency.lockutils [req-a3ee184b-1074-4ee2-a66b-066a093089f6 req-5bef1a93-b10d-45f0-8498-4e25fda27fdc service nova] Lock "7245e83c-2dda-4b2f-8a65-07f7e4d6828a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1075.465111] env[63345]: DEBUG oslo_concurrency.lockutils [req-a3ee184b-1074-4ee2-a66b-066a093089f6 req-5bef1a93-b10d-45f0-8498-4e25fda27fdc service nova] Lock "7245e83c-2dda-4b2f-8a65-07f7e4d6828a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1075.465290] env[63345]: DEBUG nova.compute.manager [req-a3ee184b-1074-4ee2-a66b-066a093089f6 req-5bef1a93-b10d-45f0-8498-4e25fda27fdc service nova] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] No waiting events found dispatching network-vif-plugged-5709e20d-8dfc-41ae-981d-01de437144e3 {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1075.465503] env[63345]: WARNING nova.compute.manager [req-a3ee184b-1074-4ee2-a66b-066a093089f6 req-5bef1a93-b10d-45f0-8498-4e25fda27fdc service nova] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Received unexpected event network-vif-plugged-5709e20d-8dfc-41ae-981d-01de437144e3 for instance with vm_state building and task_state spawning. 
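Annotation: the PowerOffVM_Task entries above (task-1017831, started at 1074.834150 and reported complete at 1075.360160 with duration_secs 0.363285) follow the usual oslo.vmware pattern: the driver starts the asynchronous vSphere task through the API session and then blocks in wait_for_task, whose polling is what emits the periodic "progress is N%" lines. A minimal sketch of that pattern follows; the vCenter credentials are placeholders, the vm_ref lookup (SearchIndex.FindAllByUuid in the entries above) is elided, and the exact constructor keywords may vary with the oslo.vmware version in use.

    # Minimal sketch of the invoke-then-wait pattern behind the entries above
    # (placeholder credentials; not the actual Nova call site).
    from oslo_vmware import api


    def power_off(session, vm_ref):
        # Start the asynchronous vSphere task, then block until it finishes.
        # wait_for_task polls the task at task_poll_interval, which produces
        # the "progress is N%" log lines seen here.
        task = session.invoke_api(session.vim, "PowerOffVM_Task", vm_ref)
        session.wait_for_task(task)


    # Placeholder session setup; a real vm_ref would come from a SearchIndex
    # or PropertyCollector lookup before calling power_off(session, vm_ref).
    session = api.VMwareAPISession(
        "vc.example.test",             # vCenter host (placeholder)
        "administrator@vsphere.test",  # username (placeholder)
        "secret",                      # password (placeholder)
        api_retry_count=10,
        task_poll_interval=0.5,
    )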
[ 1075.559473] env[63345]: DEBUG nova.network.neutron [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Successfully updated port: 5709e20d-8dfc-41ae-981d-01de437144e3 {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1075.564066] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 4868a0a0-ca35-44b0-a90c-124aa366af76] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1075.766655] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f72f8512-54fe-4406-a4ee-0321b88648d3 tempest-ServerShowV257Test-942916775 tempest-ServerShowV257Test-942916775-project-member] Lock "5cefe8a6-4af0-47d4-84f5-1d579d0c9968" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.760s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1075.877512] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1da5df5e-6301-4d15-8482-22f93cdc6fca tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Lock "22a11cf9-8f85-4371-98eb-25b267c9aff7" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.062s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1075.907274] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0e499dd-7501-4582-b153-e3e23e24e8e6 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.915462] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c545d10-30b9-4da0-9d68-f0c26780628b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.951476] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-defd318f-6e6e-4e68-96ea-9eac1c426a9f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.960459] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b2d54d3-fec8-4791-9a36-7d32136142f2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.975839] env[63345]: DEBUG nova.compute.provider_tree [None req-f8c4c6ce-9e24-4f82-bd8a-8bf7f9069f6b tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1076.063291] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquiring lock "refresh_cache-7245e83c-2dda-4b2f-8a65-07f7e4d6828a" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1076.063291] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 
tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquired lock "refresh_cache-7245e83c-2dda-4b2f-8a65-07f7e4d6828a" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1076.063291] env[63345]: DEBUG nova.network.neutron [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1076.065893] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: c84c8b9a-9164-4dd7-b094-dd09c15c6f21] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1076.373167] env[63345]: DEBUG nova.objects.instance [None req-9fe105b1-e6ac-4d43-ada5-bb0fed8bd683 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Lazy-loading 'flavor' on Instance uuid 22a11cf9-8f85-4371-98eb-25b267c9aff7 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1076.479734] env[63345]: DEBUG nova.scheduler.client.report [None req-f8c4c6ce-9e24-4f82-bd8a-8bf7f9069f6b tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1076.568024] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 017a06b3-cc1a-4822-a07f-ca881fd4254b] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1076.601230] env[63345]: DEBUG nova.network.neutron [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Instance cache missing network info. 
{{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1076.759745] env[63345]: DEBUG nova.network.neutron [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Updating instance_info_cache with network_info: [{"id": "5709e20d-8dfc-41ae-981d-01de437144e3", "address": "fa:16:3e:43:47:ce", "network": {"id": "d7581fd9-99cb-4847-b9da-a659a40e1d52", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1100696493-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c535ae9067ab4e8a87e95c68af4624fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f77ff7a1-209c-4f3f-b2a0-fd817741e739", "external-id": "nsx-vlan-transportzone-935", "segmentation_id": 935, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5709e20d-8d", "ovs_interfaceid": "5709e20d-8dfc-41ae-981d-01de437144e3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1076.877419] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9fe105b1-e6ac-4d43-ada5-bb0fed8bd683 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Acquiring lock "refresh_cache-22a11cf9-8f85-4371-98eb-25b267c9aff7" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1076.877601] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9fe105b1-e6ac-4d43-ada5-bb0fed8bd683 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Acquired lock "refresh_cache-22a11cf9-8f85-4371-98eb-25b267c9aff7" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1076.877781] env[63345]: DEBUG nova.network.neutron [None req-9fe105b1-e6ac-4d43-ada5-bb0fed8bd683 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1076.877961] env[63345]: DEBUG nova.objects.instance [None req-9fe105b1-e6ac-4d43-ada5-bb0fed8bd683 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Lazy-loading 'info_cache' on Instance uuid 22a11cf9-8f85-4371-98eb-25b267c9aff7 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1077.071933] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 27e2cb12-d251-434a-b79e-6fbda80d3637] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1077.262236] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 
tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Releasing lock "refresh_cache-7245e83c-2dda-4b2f-8a65-07f7e4d6828a" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1077.262740] env[63345]: DEBUG nova.compute.manager [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Instance network_info: |[{"id": "5709e20d-8dfc-41ae-981d-01de437144e3", "address": "fa:16:3e:43:47:ce", "network": {"id": "d7581fd9-99cb-4847-b9da-a659a40e1d52", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1100696493-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c535ae9067ab4e8a87e95c68af4624fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f77ff7a1-209c-4f3f-b2a0-fd817741e739", "external-id": "nsx-vlan-transportzone-935", "segmentation_id": 935, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5709e20d-8d", "ovs_interfaceid": "5709e20d-8dfc-41ae-981d-01de437144e3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 1077.263285] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:43:47:ce', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f77ff7a1-209c-4f3f-b2a0-fd817741e739', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5709e20d-8dfc-41ae-981d-01de437144e3', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1077.270735] env[63345]: DEBUG oslo.service.loopingcall [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1077.270952] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1077.271221] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-83b8e648-d626-417d-b146-35a912510903 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.291463] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1077.291463] env[63345]: value = "task-1017832" [ 1077.291463] env[63345]: _type = "Task" [ 1077.291463] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1077.299265] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017832, 'name': CreateVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.354699] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f57ef451-7da6-43fb-b0da-0f1d9907f2f8 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Acquiring lock "95738bee-d291-4f27-aeff-9445939bb3fa" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1077.355074] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f57ef451-7da6-43fb-b0da-0f1d9907f2f8 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Lock "95738bee-d291-4f27-aeff-9445939bb3fa" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1077.355335] env[63345]: INFO nova.compute.manager [None req-f57ef451-7da6-43fb-b0da-0f1d9907f2f8 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Shelving [ 1077.381214] env[63345]: DEBUG nova.objects.base [None req-9fe105b1-e6ac-4d43-ada5-bb0fed8bd683 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Object Instance<22a11cf9-8f85-4371-98eb-25b267c9aff7> lazy-loaded attributes: flavor,info_cache {{(pid=63345) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 1077.489958] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f8c4c6ce-9e24-4f82-bd8a-8bf7f9069f6b tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.257s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1077.490373] env[63345]: DEBUG nova.compute.manager [None req-f8c4c6ce-9e24-4f82-bd8a-8bf7f9069f6b tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Resized/migrated instance is powered off. Setting vm_state to 'stopped'. 
{{(pid=63345) _confirm_resize /opt/stack/nova/nova/compute/manager.py:5238}} [ 1077.496350] env[63345]: DEBUG oslo_concurrency.lockutils [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.688s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1077.498353] env[63345]: INFO nova.compute.claims [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1077.501621] env[63345]: DEBUG nova.compute.manager [req-22c2d454-1f62-4455-baad-212f49c2a7eb req-c7916d07-2664-44cd-9e23-c6d58a9eaf84 service nova] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Received event network-changed-5709e20d-8dfc-41ae-981d-01de437144e3 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1077.501813] env[63345]: DEBUG nova.compute.manager [req-22c2d454-1f62-4455-baad-212f49c2a7eb req-c7916d07-2664-44cd-9e23-c6d58a9eaf84 service nova] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Refreshing instance network info cache due to event network-changed-5709e20d-8dfc-41ae-981d-01de437144e3. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 1077.502046] env[63345]: DEBUG oslo_concurrency.lockutils [req-22c2d454-1f62-4455-baad-212f49c2a7eb req-c7916d07-2664-44cd-9e23-c6d58a9eaf84 service nova] Acquiring lock "refresh_cache-7245e83c-2dda-4b2f-8a65-07f7e4d6828a" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1077.502202] env[63345]: DEBUG oslo_concurrency.lockutils [req-22c2d454-1f62-4455-baad-212f49c2a7eb req-c7916d07-2664-44cd-9e23-c6d58a9eaf84 service nova] Acquired lock "refresh_cache-7245e83c-2dda-4b2f-8a65-07f7e4d6828a" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1077.502427] env[63345]: DEBUG nova.network.neutron [req-22c2d454-1f62-4455-baad-212f49c2a7eb req-c7916d07-2664-44cd-9e23-c6d58a9eaf84 service nova] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Refreshing network info cache for port 5709e20d-8dfc-41ae-981d-01de437144e3 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1077.575368] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 0fccfe51-ee7f-4e0f-b8bd-996c0ff94b01] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1077.801941] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017832, 'name': CreateVM_Task, 'duration_secs': 0.3469} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1077.802295] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1077.803015] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1077.803226] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1077.803609] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1077.803828] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-817ac046-9f70-4aac-9be2-3b503e46991b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.808121] env[63345]: DEBUG oslo_vmware.api [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for the task: (returnval){ [ 1077.808121] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52c542f0-26d6-63a6-dcc9-116363804fae" [ 1077.808121] env[63345]: _type = "Task" [ 1077.808121] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1077.815516] env[63345]: DEBUG oslo_vmware.api [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52c542f0-26d6-63a6-dcc9-116363804fae, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.063379] env[63345]: INFO nova.scheduler.client.report [None req-f8c4c6ce-9e24-4f82-bd8a-8bf7f9069f6b tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Deleted allocation for migration 6995199b-b85e-43ee-9f2c-baf2244d91a8 [ 1078.079185] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: a415d4f2-abc7-4553-8442-312316e686b2] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1078.115894] env[63345]: DEBUG nova.network.neutron [None req-9fe105b1-e6ac-4d43-ada5-bb0fed8bd683 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Updating instance_info_cache with network_info: [{"id": "267ad158-547a-4d3a-a838-3d964626d731", "address": "fa:16:3e:9e:ba:8d", "network": {"id": "13df4553-212e-4adb-8de0-da1acdf99671", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-238696814-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.153", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4560e378b6aa47a3bbb5a2f7c5b76f5f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "90328c7b-15c4-4742-805b-755248d67029", "external-id": "nsx-vlan-transportzone-860", "segmentation_id": 860, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap267ad158-54", "ovs_interfaceid": "267ad158-547a-4d3a-a838-3d964626d731", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1078.319758] env[63345]: DEBUG oslo_vmware.api [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52c542f0-26d6-63a6-dcc9-116363804fae, 'name': SearchDatastore_Task, 'duration_secs': 0.012367} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1078.320089] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1078.320339] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1078.320583] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1078.320738] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1078.320925] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1078.321232] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f7892bd5-f70a-4606-90b4-3202569100d5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.330459] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1078.330659] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1078.331341] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-00817276-7013-4b5d-a23d-d82c41d83821 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.336745] env[63345]: DEBUG oslo_vmware.api [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for the task: (returnval){ [ 1078.336745] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52687de2-9249-387d-f771-8eca852000ba" [ 1078.336745] env[63345]: _type = "Task" [ 1078.336745] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.344665] env[63345]: DEBUG oslo_vmware.api [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52687de2-9249-387d-f771-8eca852000ba, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.364430] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-f57ef451-7da6-43fb-b0da-0f1d9907f2f8 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1078.364694] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-741abd38-fb60-4f11-b6a8-0e121855b7ef {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.374024] env[63345]: DEBUG oslo_vmware.api [None req-f57ef451-7da6-43fb-b0da-0f1d9907f2f8 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Waiting for the task: (returnval){ [ 1078.374024] env[63345]: value = "task-1017833" [ 1078.374024] env[63345]: _type = "Task" [ 1078.374024] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.380783] env[63345]: DEBUG oslo_vmware.api [None req-f57ef451-7da6-43fb-b0da-0f1d9907f2f8 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017833, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.404163] env[63345]: DEBUG nova.network.neutron [req-22c2d454-1f62-4455-baad-212f49c2a7eb req-c7916d07-2664-44cd-9e23-c6d58a9eaf84 service nova] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Updated VIF entry in instance network info cache for port 5709e20d-8dfc-41ae-981d-01de437144e3. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1078.404571] env[63345]: DEBUG nova.network.neutron [req-22c2d454-1f62-4455-baad-212f49c2a7eb req-c7916d07-2664-44cd-9e23-c6d58a9eaf84 service nova] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Updating instance_info_cache with network_info: [{"id": "5709e20d-8dfc-41ae-981d-01de437144e3", "address": "fa:16:3e:43:47:ce", "network": {"id": "d7581fd9-99cb-4847-b9da-a659a40e1d52", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1100696493-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c535ae9067ab4e8a87e95c68af4624fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f77ff7a1-209c-4f3f-b2a0-fd817741e739", "external-id": "nsx-vlan-transportzone-935", "segmentation_id": 935, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5709e20d-8d", "ovs_interfaceid": "5709e20d-8dfc-41ae-981d-01de437144e3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1078.568801] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f8c4c6ce-9e24-4f82-bd8a-8bf7f9069f6b tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lock "726332dd-8699-49a4-a9ea-b9cbfc159855" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 12.575s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1078.583335] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: ece7ff3d-1eb3-4d9c-9b6e-fdb23471b2a6] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1078.620689] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9fe105b1-e6ac-4d43-ada5-bb0fed8bd683 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Releasing lock "refresh_cache-22a11cf9-8f85-4371-98eb-25b267c9aff7" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1078.650707] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4249c9a8-257d-42b5-93c3-27a319ea36e6 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.658356] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c8f9735-a509-436d-8e39-2090dade1659 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.688084] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db22e422-2044-48dc-baf5-06881e52240d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1078.694582] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ef49427-582d-4d33-b380-201ac245e756 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.707215] env[63345]: DEBUG nova.compute.provider_tree [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1078.847328] env[63345]: DEBUG oslo_vmware.api [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52687de2-9249-387d-f771-8eca852000ba, 'name': SearchDatastore_Task, 'duration_secs': 0.008655} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1078.848128] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1ddaa315-085c-43d4-9d1a-ac595ff41c55 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.852889] env[63345]: DEBUG oslo_vmware.api [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for the task: (returnval){ [ 1078.852889] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52082770-cd5d-2168-cce5-0bafae36d029" [ 1078.852889] env[63345]: _type = "Task" [ 1078.852889] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.860011] env[63345]: DEBUG oslo_vmware.api [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52082770-cd5d-2168-cce5-0bafae36d029, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.879922] env[63345]: DEBUG oslo_vmware.api [None req-f57ef451-7da6-43fb-b0da-0f1d9907f2f8 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017833, 'name': PowerOffVM_Task, 'duration_secs': 0.20609} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1078.880190] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-f57ef451-7da6-43fb-b0da-0f1d9907f2f8 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1078.880915] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2854ae7-9d89-4178-a9c1-d432824175b9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.898115] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99f92744-5291-4514-9146-c00d67848c5c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.907388] env[63345]: DEBUG oslo_concurrency.lockutils [req-22c2d454-1f62-4455-baad-212f49c2a7eb req-c7916d07-2664-44cd-9e23-c6d58a9eaf84 service nova] Releasing lock "refresh_cache-7245e83c-2dda-4b2f-8a65-07f7e4d6828a" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1078.978979] env[63345]: DEBUG nova.objects.instance [None req-64308bdd-1806-4959-98cc-eb8b992f8a81 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lazy-loading 'flavor' on Instance uuid 726332dd-8699-49a4-a9ea-b9cbfc159855 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1079.085693] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 14198777-9091-4c69-8928-c83135acc7d2] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1079.210029] env[63345]: DEBUG nova.scheduler.client.report [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1079.363370] env[63345]: DEBUG oslo_vmware.api [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52082770-cd5d-2168-cce5-0bafae36d029, 'name': SearchDatastore_Task, 'duration_secs': 0.008399} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.363644] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1079.363908] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 7245e83c-2dda-4b2f-8a65-07f7e4d6828a/7245e83c-2dda-4b2f-8a65-07f7e4d6828a.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1079.364185] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f3849192-0c9f-4cd1-a9fc-cc1e3ae41373 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.371544] env[63345]: DEBUG oslo_vmware.api [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for the task: (returnval){ [ 1079.371544] env[63345]: value = "task-1017834" [ 1079.371544] env[63345]: _type = "Task" [ 1079.371544] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1079.378834] env[63345]: DEBUG oslo_vmware.api [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017834, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.408263] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-f57ef451-7da6-43fb-b0da-0f1d9907f2f8 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Creating Snapshot of the VM instance {{(pid=63345) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1079.408817] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-20f424d4-58a2-43b0-9678-09adb62f3e4b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.415196] env[63345]: DEBUG oslo_vmware.api [None req-f57ef451-7da6-43fb-b0da-0f1d9907f2f8 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Waiting for the task: (returnval){ [ 1079.415196] env[63345]: value = "task-1017835" [ 1079.415196] env[63345]: _type = "Task" [ 1079.415196] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1079.423231] env[63345]: DEBUG oslo_vmware.api [None req-f57ef451-7da6-43fb-b0da-0f1d9907f2f8 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017835, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.484580] env[63345]: DEBUG oslo_concurrency.lockutils [None req-64308bdd-1806-4959-98cc-eb8b992f8a81 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Acquiring lock "refresh_cache-726332dd-8699-49a4-a9ea-b9cbfc159855" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1079.484800] env[63345]: DEBUG oslo_concurrency.lockutils [None req-64308bdd-1806-4959-98cc-eb8b992f8a81 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Acquired lock "refresh_cache-726332dd-8699-49a4-a9ea-b9cbfc159855" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1079.484986] env[63345]: DEBUG nova.network.neutron [None req-64308bdd-1806-4959-98cc-eb8b992f8a81 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1079.485199] env[63345]: DEBUG nova.objects.instance [None req-64308bdd-1806-4959-98cc-eb8b992f8a81 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lazy-loading 'info_cache' on Instance uuid 726332dd-8699-49a4-a9ea-b9cbfc159855 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1079.589052] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 0da64b45-fa00-4fe8-8d1d-df586f27743f] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1079.626977] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fe105b1-e6ac-4d43-ada5-bb0fed8bd683 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1079.627854] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-888b4ff6-e75c-4493-8a68-24d37aa38f9b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.636685] env[63345]: DEBUG oslo_vmware.api [None req-9fe105b1-e6ac-4d43-ada5-bb0fed8bd683 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Waiting for the task: (returnval){ [ 1079.636685] env[63345]: value = "task-1017836" [ 1079.636685] env[63345]: _type = "Task" [ 1079.636685] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1079.645045] env[63345]: DEBUG oslo_vmware.api [None req-9fe105b1-e6ac-4d43-ada5-bb0fed8bd683 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': task-1017836, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.714428] env[63345]: DEBUG oslo_concurrency.lockutils [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.218s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1079.714910] env[63345]: DEBUG nova.compute.manager [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] Start building networks asynchronously for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 1079.718511] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0e2f79e0-206b-4041-87a3-fc45db2d4918 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.661s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1079.718931] env[63345]: DEBUG nova.objects.instance [None req-0e2f79e0-206b-4041-87a3-fc45db2d4918 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lazy-loading 'resources' on Instance uuid bce78147-6f6d-47a2-84f3-482f59a8bb8e {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1079.882506] env[63345]: DEBUG oslo_vmware.api [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017834, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.452135} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.882774] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 7245e83c-2dda-4b2f-8a65-07f7e4d6828a/7245e83c-2dda-4b2f-8a65-07f7e4d6828a.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1079.882995] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1079.883318] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1c73d4c9-e2ee-4a06-bd83-e2cb78482d28 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.890015] env[63345]: DEBUG oslo_vmware.api [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for the task: (returnval){ [ 1079.890015] env[63345]: value = "task-1017837" [ 1079.890015] env[63345]: _type = "Task" [ 1079.890015] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1079.899339] env[63345]: DEBUG oslo_vmware.api [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017837, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.924923] env[63345]: DEBUG oslo_vmware.api [None req-f57ef451-7da6-43fb-b0da-0f1d9907f2f8 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017835, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.988655] env[63345]: DEBUG nova.objects.base [None req-64308bdd-1806-4959-98cc-eb8b992f8a81 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Object Instance<726332dd-8699-49a4-a9ea-b9cbfc159855> lazy-loaded attributes: flavor,info_cache {{(pid=63345) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 1080.092541] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 5e20b33c-1481-4bd3-b269-29a70cc3150d] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1080.146356] env[63345]: DEBUG oslo_vmware.api [None req-9fe105b1-e6ac-4d43-ada5-bb0fed8bd683 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': task-1017836, 'name': PowerOnVM_Task, 'duration_secs': 0.432411} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1080.146659] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fe105b1-e6ac-4d43-ada5-bb0fed8bd683 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1080.146822] env[63345]: DEBUG nova.compute.manager [None req-9fe105b1-e6ac-4d43-ada5-bb0fed8bd683 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1080.147847] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39c2f1a7-a6dd-4ddd-948f-521f6fe4e39e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.220018] env[63345]: DEBUG nova.compute.utils [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1080.221530] env[63345]: DEBUG nova.compute.manager [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] Allocating IP information in the background. {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1080.221735] env[63345]: DEBUG nova.network.neutron [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1080.264792] env[63345]: DEBUG nova.policy [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '22ef13d4324a4357bcbd6fc6d755c101', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4a7aaf150ea243b6a38a4b14f265bd4d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 1080.352139] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-476c29a3-b340-4155-a8bb-424b4f10c821 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.360983] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6f7d630-52df-45b1-9bb1-33a9b8673a4a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.396666] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-be057e03-24a8-46ae-b083-24205cfaf4d2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.403970] env[63345]: DEBUG oslo_vmware.api [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017837, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.053279} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1080.406089] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1080.406862] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b02467fe-cac9-499d-a2a1-f38a28880161 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.410061] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e78e5c8-031d-4e0f-bf2f-c463c570cfb5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.443428] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Reconfiguring VM instance instance-0000006e to attach disk [datastore2] 7245e83c-2dda-4b2f-8a65-07f7e4d6828a/7245e83c-2dda-4b2f-8a65-07f7e4d6828a.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1080.443950] env[63345]: DEBUG nova.compute.provider_tree [None req-0e2f79e0-206b-4041-87a3-fc45db2d4918 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1080.447865] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-325b7434-e926-4b10-94fe-3b19fcd1c69c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.468763] env[63345]: DEBUG oslo_vmware.api [None req-f57ef451-7da6-43fb-b0da-0f1d9907f2f8 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017835, 'name': CreateSnapshot_Task, 'duration_secs': 0.517903} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1080.469912] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-f57ef451-7da6-43fb-b0da-0f1d9907f2f8 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Created Snapshot of the VM instance {{(pid=63345) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1080.470258] env[63345]: DEBUG oslo_vmware.api [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for the task: (returnval){ [ 1080.470258] env[63345]: value = "task-1017838" [ 1080.470258] env[63345]: _type = "Task" [ 1080.470258] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1080.470935] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbd887ac-b4d8-488f-854e-0726abe712cc {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.485512] env[63345]: DEBUG oslo_vmware.api [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017838, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.587734] env[63345]: DEBUG nova.network.neutron [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] Successfully created port: f2837ec1-0df3-454a-bc68-fb0ca9562eb4 {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1080.596176] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 75fc8365-bf8d-489e-935f-a5169c6a7e62] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1080.717863] env[63345]: DEBUG nova.network.neutron [None req-64308bdd-1806-4959-98cc-eb8b992f8a81 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Updating instance_info_cache with network_info: [{"id": "114e38e0-a558-4242-ad5b-4aac063dcb72", "address": "fa:16:3e:bb:2c:f8", "network": {"id": "dffa0b34-9323-42eb-aeb1-e32aebcb75c8", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1826417035-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.227", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "57e386920081487583ea143003aca8c4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "94e1d797-8eb2-4400-9f7d-f2eb60eb4cf2", "external-id": "nsx-vlan-transportzone-828", "segmentation_id": 828, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tap114e38e0-a5", "ovs_interfaceid": "114e38e0-a558-4242-ad5b-4aac063dcb72", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1080.727566] env[63345]: DEBUG nova.compute.manager [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] Start building block device mappings for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 1080.964664] env[63345]: DEBUG nova.scheduler.client.report [None req-0e2f79e0-206b-4041-87a3-fc45db2d4918 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1080.984201] env[63345]: DEBUG oslo_vmware.api [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017838, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.992941] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-f57ef451-7da6-43fb-b0da-0f1d9907f2f8 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Creating linked-clone VM from snapshot {{(pid=63345) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1080.993528] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-6168e332-08a9-4960-9ff6-6ae7926f379c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.002631] env[63345]: DEBUG oslo_vmware.api [None req-f57ef451-7da6-43fb-b0da-0f1d9907f2f8 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Waiting for the task: (returnval){ [ 1081.002631] env[63345]: value = "task-1017839" [ 1081.002631] env[63345]: _type = "Task" [ 1081.002631] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.010338] env[63345]: DEBUG oslo_vmware.api [None req-f57ef451-7da6-43fb-b0da-0f1d9907f2f8 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017839, 'name': CloneVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.098915] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 1e349d03-6cae-4322-9941-d48c52c21c0e] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1081.220907] env[63345]: DEBUG oslo_concurrency.lockutils [None req-64308bdd-1806-4959-98cc-eb8b992f8a81 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Releasing lock "refresh_cache-726332dd-8699-49a4-a9ea-b9cbfc159855" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1081.469770] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0e2f79e0-206b-4041-87a3-fc45db2d4918 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.751s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1081.472183] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.342s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1081.473690] env[63345]: INFO nova.compute.claims [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1081.485015] env[63345]: DEBUG oslo_vmware.api [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017838, 'name': ReconfigVM_Task, 'duration_secs': 0.915429} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.485282] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Reconfigured VM instance instance-0000006e to attach disk [datastore2] 7245e83c-2dda-4b2f-8a65-07f7e4d6828a/7245e83c-2dda-4b2f-8a65-07f7e4d6828a.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1081.485883] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b2f36a75-84f4-4ef8-b59b-c56bfa39b79b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.490597] env[63345]: INFO nova.scheduler.client.report [None req-0e2f79e0-206b-4041-87a3-fc45db2d4918 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Deleted allocations for instance bce78147-6f6d-47a2-84f3-482f59a8bb8e [ 1081.495229] env[63345]: DEBUG oslo_vmware.api [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for the task: (returnval){ [ 1081.495229] env[63345]: value = "task-1017840" [ 1081.495229] env[63345]: _type = "Task" [ 1081.495229] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.503569] env[63345]: DEBUG oslo_vmware.api [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017840, 'name': Rename_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.511686] env[63345]: DEBUG oslo_vmware.api [None req-f57ef451-7da6-43fb-b0da-0f1d9907f2f8 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017839, 'name': CloneVM_Task} progress is 94%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.602113] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: a3f34e0e-2969-406f-a086-a925549e458e] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1081.736231] env[63345]: DEBUG nova.compute.manager [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] Start spawning the instance on the hypervisor. 
{{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 1081.761524] env[63345]: DEBUG nova.virt.hardware [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1081.761923] env[63345]: DEBUG nova.virt.hardware [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1081.762139] env[63345]: DEBUG nova.virt.hardware [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1081.762338] env[63345]: DEBUG nova.virt.hardware [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1081.762499] env[63345]: DEBUG nova.virt.hardware [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1081.762657] env[63345]: DEBUG nova.virt.hardware [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1081.762874] env[63345]: DEBUG nova.virt.hardware [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1081.763051] env[63345]: DEBUG nova.virt.hardware [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1081.763286] env[63345]: DEBUG nova.virt.hardware [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1081.763487] env[63345]: DEBUG nova.virt.hardware [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1081.763663] env[63345]: DEBUG nova.virt.hardware [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1081.764552] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e136056f-2570-4dde-98e0-112687d98948 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.772958] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6c31049-026e-435a-89d5-1c0d95bd3751 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.003365] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0e2f79e0-206b-4041-87a3-fc45db2d4918 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lock "bce78147-6f6d-47a2-84f3-482f59a8bb8e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.897s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1082.004700] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9c0392e3-9707-4d9f-8983-046bdb199563 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lock "bce78147-6f6d-47a2-84f3-482f59a8bb8e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 15.514s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1082.005062] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9c0392e3-9707-4d9f-8983-046bdb199563 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquiring lock "bce78147-6f6d-47a2-84f3-482f59a8bb8e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1082.005284] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9c0392e3-9707-4d9f-8983-046bdb199563 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lock "bce78147-6f6d-47a2-84f3-482f59a8bb8e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1082.005461] env[63345]: DEBUG 
oslo_concurrency.lockutils [None req-9c0392e3-9707-4d9f-8983-046bdb199563 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lock "bce78147-6f6d-47a2-84f3-482f59a8bb8e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1082.007106] env[63345]: INFO nova.compute.manager [None req-9c0392e3-9707-4d9f-8983-046bdb199563 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Terminating instance [ 1082.014674] env[63345]: DEBUG oslo_vmware.api [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017840, 'name': Rename_Task, 'duration_secs': 0.151721} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.016496] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1082.017097] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7846920e-dec3-451e-8d51-3ef8649e1b30 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.022629] env[63345]: DEBUG oslo_vmware.api [None req-f57ef451-7da6-43fb-b0da-0f1d9907f2f8 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017839, 'name': CloneVM_Task} progress is 95%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.028376] env[63345]: DEBUG oslo_vmware.api [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for the task: (returnval){ [ 1082.028376] env[63345]: value = "task-1017841" [ 1082.028376] env[63345]: _type = "Task" [ 1082.028376] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.035923] env[63345]: DEBUG oslo_vmware.api [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017841, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.105667] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: f37b4a95-0725-4a84-b726-fd4f26e87020] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1082.109411] env[63345]: DEBUG nova.compute.manager [req-f706fee2-d40e-4e14-b6c5-e029a07d913c req-1c21afaf-9511-4cf2-bee0-2b1f8b019283 service nova] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] Received event network-vif-plugged-f2837ec1-0df3-454a-bc68-fb0ca9562eb4 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1082.109657] env[63345]: DEBUG oslo_concurrency.lockutils [req-f706fee2-d40e-4e14-b6c5-e029a07d913c req-1c21afaf-9511-4cf2-bee0-2b1f8b019283 service nova] Acquiring lock "148c961e-d260-4dbd-ad9f-52f94b072096-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1082.109876] env[63345]: DEBUG oslo_concurrency.lockutils [req-f706fee2-d40e-4e14-b6c5-e029a07d913c req-1c21afaf-9511-4cf2-bee0-2b1f8b019283 service nova] Lock "148c961e-d260-4dbd-ad9f-52f94b072096-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1082.110338] env[63345]: DEBUG oslo_concurrency.lockutils [req-f706fee2-d40e-4e14-b6c5-e029a07d913c req-1c21afaf-9511-4cf2-bee0-2b1f8b019283 service nova] Lock "148c961e-d260-4dbd-ad9f-52f94b072096-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1082.110338] env[63345]: DEBUG nova.compute.manager [req-f706fee2-d40e-4e14-b6c5-e029a07d913c req-1c21afaf-9511-4cf2-bee0-2b1f8b019283 service nova] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] No waiting events found dispatching network-vif-plugged-f2837ec1-0df3-454a-bc68-fb0ca9562eb4 {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1082.110449] env[63345]: WARNING nova.compute.manager [req-f706fee2-d40e-4e14-b6c5-e029a07d913c req-1c21afaf-9511-4cf2-bee0-2b1f8b019283 service nova] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] Received unexpected event network-vif-plugged-f2837ec1-0df3-454a-bc68-fb0ca9562eb4 for instance with vm_state building and task_state spawning. 
[ 1082.227645] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-64308bdd-1806-4959-98cc-eb8b992f8a81 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1082.227899] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-569dba91-96f6-4d18-a985-ca8c98577314 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.235731] env[63345]: DEBUG oslo_vmware.api [None req-64308bdd-1806-4959-98cc-eb8b992f8a81 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 1082.235731] env[63345]: value = "task-1017842" [ 1082.235731] env[63345]: _type = "Task" [ 1082.235731] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.244522] env[63345]: DEBUG oslo_vmware.api [None req-64308bdd-1806-4959-98cc-eb8b992f8a81 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017842, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.259319] env[63345]: DEBUG nova.network.neutron [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] Successfully updated port: f2837ec1-0df3-454a-bc68-fb0ca9562eb4 {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1082.519783] env[63345]: DEBUG nova.compute.manager [None req-9c0392e3-9707-4d9f-8983-046bdb199563 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Start destroying the instance on the hypervisor. {{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 1082.520171] env[63345]: DEBUG oslo_vmware.api [None req-f57ef451-7da6-43fb-b0da-0f1d9907f2f8 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017839, 'name': CloneVM_Task, 'duration_secs': 1.156281} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.523611] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a689b3b2-0d5e-48ed-9078-f47904751b3a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.525551] env[63345]: INFO nova.virt.vmwareapi.vmops [None req-f57ef451-7da6-43fb-b0da-0f1d9907f2f8 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Created linked-clone VM from snapshot [ 1082.526540] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3858ff5a-54e2-4238-825d-cc0f57cd38d5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.537190] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df8b06b7-08ed-46f5-94c6-af64b13c1f8d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.557585] env[63345]: DEBUG nova.virt.vmwareapi.images [None req-f57ef451-7da6-43fb-b0da-0f1d9907f2f8 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Uploading image 00f54121-1c47-489a-9345-a57300eace29 {{(pid=63345) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 1082.559610] env[63345]: DEBUG oslo_vmware.api [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017841, 'name': PowerOnVM_Task} progress is 92%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.582780] env[63345]: WARNING nova.virt.vmwareapi.driver [None req-9c0392e3-9707-4d9f-8983-046bdb199563 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Instance does not exists. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance bce78147-6f6d-47a2-84f3-482f59a8bb8e could not be found. [ 1082.583013] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-9c0392e3-9707-4d9f-8983-046bdb199563 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1082.587478] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c7693f3c-7c0c-48c2-baf8-bfc053cd3818 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.597203] env[63345]: DEBUG oslo_vmware.rw_handles [None req-f57ef451-7da6-43fb-b0da-0f1d9907f2f8 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1082.597203] env[63345]: value = "vm-226166" [ 1082.597203] env[63345]: _type = "VirtualMachine" [ 1082.597203] env[63345]: }. 
{{(pid=63345) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1082.597547] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-eb5631d3-f5b2-4f32-bb29-190a055ad6ef {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.603993] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9a7f475-a98f-4f34-a021-14df3b3f2c98 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.615392] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: dde93fd5-6312-4d91-b041-b7fc84b207d3] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1082.639953] env[63345]: DEBUG oslo_vmware.rw_handles [None req-f57ef451-7da6-43fb-b0da-0f1d9907f2f8 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Lease: (returnval){ [ 1082.639953] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52d241c3-165f-29f3-414a-513516c6687a" [ 1082.639953] env[63345]: _type = "HttpNfcLease" [ 1082.639953] env[63345]: } obtained for exporting VM: (result){ [ 1082.639953] env[63345]: value = "vm-226166" [ 1082.639953] env[63345]: _type = "VirtualMachine" [ 1082.639953] env[63345]: }. {{(pid=63345) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1082.640313] env[63345]: DEBUG oslo_vmware.api [None req-f57ef451-7da6-43fb-b0da-0f1d9907f2f8 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Waiting for the lease: (returnval){ [ 1082.640313] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52d241c3-165f-29f3-414a-513516c6687a" [ 1082.640313] env[63345]: _type = "HttpNfcLease" [ 1082.640313] env[63345]: } to be ready. {{(pid=63345) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1082.656639] env[63345]: WARNING nova.virt.vmwareapi.vmops [None req-9c0392e3-9707-4d9f-8983-046bdb199563 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance bce78147-6f6d-47a2-84f3-482f59a8bb8e could not be found. [ 1082.656860] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-9c0392e3-9707-4d9f-8983-046bdb199563 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1082.657114] env[63345]: INFO nova.compute.manager [None req-9c0392e3-9707-4d9f-8983-046bdb199563 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Took 0.14 seconds to destroy the instance on the hypervisor. 
[ 1082.657324] env[63345]: DEBUG oslo.service.loopingcall [None req-9c0392e3-9707-4d9f-8983-046bdb199563 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1082.659015] env[63345]: DEBUG nova.compute.manager [-] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 1082.659129] env[63345]: DEBUG nova.network.neutron [-] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1082.660747] env[63345]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1082.660747] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52d241c3-165f-29f3-414a-513516c6687a" [ 1082.660747] env[63345]: _type = "HttpNfcLease" [ 1082.660747] env[63345]: } is initializing. {{(pid=63345) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1082.673578] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f476f926-4ca3-425b-8dff-00e22673cf1d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.682539] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c95a480-028d-4e93-8448-dae2e0802fe9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.716969] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1664274-b444-4a28-9038-5bccf41090a3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.725928] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54ebf706-d64f-46d2-9800-6dcab957ca56 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.741543] env[63345]: DEBUG nova.compute.provider_tree [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1082.751878] env[63345]: DEBUG oslo_vmware.api [None req-64308bdd-1806-4959-98cc-eb8b992f8a81 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017842, 'name': PowerOnVM_Task, 'duration_secs': 0.42696} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.752358] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-64308bdd-1806-4959-98cc-eb8b992f8a81 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1082.752618] env[63345]: DEBUG nova.compute.manager [None req-64308bdd-1806-4959-98cc-eb8b992f8a81 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1082.753509] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84f510f5-0a94-4639-80e2-d0d8a2e20f7d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.762146] env[63345]: DEBUG oslo_concurrency.lockutils [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Acquiring lock "refresh_cache-148c961e-d260-4dbd-ad9f-52f94b072096" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1082.762344] env[63345]: DEBUG oslo_concurrency.lockutils [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Acquired lock "refresh_cache-148c961e-d260-4dbd-ad9f-52f94b072096" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1082.762555] env[63345]: DEBUG nova.network.neutron [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1082.999020] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8849b63e-ff97-4cb2-878d-9edf3db09075 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Acquiring lock "95ef4f91-a618-4ae2-95ad-d027c031f239" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1082.999020] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8849b63e-ff97-4cb2-878d-9edf3db09075 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Lock "95ef4f91-a618-4ae2-95ad-d027c031f239" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1083.041167] env[63345]: DEBUG oslo_vmware.api [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017841, 'name': PowerOnVM_Task} progress is 92%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.119060] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 40d228ea-881e-4442-a16a-6758d061aa39] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1083.149362] env[63345]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1083.149362] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52d241c3-165f-29f3-414a-513516c6687a" [ 1083.149362] env[63345]: _type = "HttpNfcLease" [ 1083.149362] env[63345]: } is ready. {{(pid=63345) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1083.149659] env[63345]: DEBUG oslo_vmware.rw_handles [None req-f57ef451-7da6-43fb-b0da-0f1d9907f2f8 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1083.149659] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52d241c3-165f-29f3-414a-513516c6687a" [ 1083.149659] env[63345]: _type = "HttpNfcLease" [ 1083.149659] env[63345]: }. {{(pid=63345) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1083.150499] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ec45b82-2718-4e9c-a7d9-54c2d9fb0a0d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.158347] env[63345]: DEBUG oslo_vmware.rw_handles [None req-f57ef451-7da6-43fb-b0da-0f1d9907f2f8 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52eb4aa8-fa46-ecee-a4f6-9877ae9571c6/disk-0.vmdk from lease info. {{(pid=63345) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1083.158628] env[63345]: DEBUG oslo_vmware.rw_handles [None req-f57ef451-7da6-43fb-b0da-0f1d9907f2f8 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52eb4aa8-fa46-ecee-a4f6-9877ae9571c6/disk-0.vmdk for reading. 
{{(pid=63345) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1083.214504] env[63345]: DEBUG nova.network.neutron [-] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1083.249385] env[63345]: DEBUG nova.scheduler.client.report [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1083.254129] env[63345]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-b0a52fbe-8b13-4212-9612-0cdc8ad32593 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.311975] env[63345]: DEBUG nova.network.neutron [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1083.500965] env[63345]: DEBUG nova.network.neutron [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] Updating instance_info_cache with network_info: [{"id": "f2837ec1-0df3-454a-bc68-fb0ca9562eb4", "address": "fa:16:3e:85:d4:0e", "network": {"id": "04c13a40-3e24-45e3-b045-adb1f5b0ad03", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1754460710-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a7aaf150ea243b6a38a4b14f265bd4d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3753f451-fa23-4988-9361-074fb0bd3fd4", "external-id": "nsx-vlan-transportzone-440", "segmentation_id": 440, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf2837ec1-0d", "ovs_interfaceid": "f2837ec1-0df3-454a-bc68-fb0ca9562eb4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1083.503329] env[63345]: DEBUG nova.compute.utils [None req-8849b63e-ff97-4cb2-878d-9edf3db09075 tempest-AttachVolumeNegativeTest-873190635 
tempest-AttachVolumeNegativeTest-873190635-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1083.542316] env[63345]: DEBUG oslo_vmware.api [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017841, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.623541] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 691ca6b4-e6fa-4b7e-89d9-4ee4fbc73e0f] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1083.720195] env[63345]: INFO nova.compute.manager [-] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Took 1.06 seconds to deallocate network for instance. [ 1083.720762] env[63345]: DEBUG oslo_concurrency.lockutils [None req-31ff480a-2dc5-40f0-8e04-e70d052dc4b7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Acquiring lock "726332dd-8699-49a4-a9ea-b9cbfc159855" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1083.721347] env[63345]: DEBUG oslo_concurrency.lockutils [None req-31ff480a-2dc5-40f0-8e04-e70d052dc4b7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lock "726332dd-8699-49a4-a9ea-b9cbfc159855" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1083.721799] env[63345]: DEBUG oslo_concurrency.lockutils [None req-31ff480a-2dc5-40f0-8e04-e70d052dc4b7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Acquiring lock "726332dd-8699-49a4-a9ea-b9cbfc159855-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1083.722073] env[63345]: DEBUG oslo_concurrency.lockutils [None req-31ff480a-2dc5-40f0-8e04-e70d052dc4b7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lock "726332dd-8699-49a4-a9ea-b9cbfc159855-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1083.722301] env[63345]: DEBUG oslo_concurrency.lockutils [None req-31ff480a-2dc5-40f0-8e04-e70d052dc4b7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lock "726332dd-8699-49a4-a9ea-b9cbfc159855-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1083.726756] env[63345]: INFO nova.compute.manager [None req-31ff480a-2dc5-40f0-8e04-e70d052dc4b7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] 
Terminating instance [ 1083.740614] env[63345]: WARNING nova.volume.cinder [None req-9c0392e3-9707-4d9f-8983-046bdb199563 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Attachment e0bfa2bd-844e-4580-96fb-30414a149211 does not exist. Ignoring.: cinderclient.exceptions.NotFound: Volume attachment could not be found with filter: attachment_id = e0bfa2bd-844e-4580-96fb-30414a149211. (HTTP 404) (Request-ID: req-dfada49d-a7fa-44df-8164-d5dfcd7a4e0b) [ 1083.740911] env[63345]: INFO nova.compute.manager [None req-9c0392e3-9707-4d9f-8983-046bdb199563 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Took 0.02 seconds to detach 1 volumes for instance. [ 1083.743054] env[63345]: DEBUG nova.compute.manager [None req-9c0392e3-9707-4d9f-8983-046bdb199563 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Deleting volume: 20d77566-3841-4d48-8c1e-d94d3b3b3333 {{(pid=63345) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3281}} [ 1083.754439] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.282s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1083.754953] env[63345]: DEBUG nova.compute.manager [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8] Start building networks asynchronously for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 1083.759714] env[63345]: WARNING nova.compute.manager [None req-9c0392e3-9707-4d9f-8983-046bdb199563 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Failed to delete volume: 20d77566-3841-4d48-8c1e-d94d3b3b3333 due to Volume 20d77566-3841-4d48-8c1e-d94d3b3b3333 could not be found.: nova.exception.VolumeNotFound: Volume 20d77566-3841-4d48-8c1e-d94d3b3b3333 could not be found. 
[ 1083.762521] env[63345]: DEBUG oslo_concurrency.lockutils [None req-905f62e0-c0fa-4c9f-ad5f-1a50c8c86d04 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.496s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1083.763913] env[63345]: DEBUG nova.objects.instance [None req-905f62e0-c0fa-4c9f-ad5f-1a50c8c86d04 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lazy-loading 'resources' on Instance uuid 0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1084.005013] env[63345]: DEBUG oslo_concurrency.lockutils [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Releasing lock "refresh_cache-148c961e-d260-4dbd-ad9f-52f94b072096" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1084.005434] env[63345]: DEBUG nova.compute.manager [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] Instance network_info: |[{"id": "f2837ec1-0df3-454a-bc68-fb0ca9562eb4", "address": "fa:16:3e:85:d4:0e", "network": {"id": "04c13a40-3e24-45e3-b045-adb1f5b0ad03", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1754460710-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a7aaf150ea243b6a38a4b14f265bd4d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3753f451-fa23-4988-9361-074fb0bd3fd4", "external-id": "nsx-vlan-transportzone-440", "segmentation_id": 440, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf2837ec1-0d", "ovs_interfaceid": "f2837ec1-0df3-454a-bc68-fb0ca9562eb4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 1084.006178] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:85:d4:0e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3753f451-fa23-4988-9361-074fb0bd3fd4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f2837ec1-0df3-454a-bc68-fb0ca9562eb4', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1084.014161] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-04cd80bb-d948-4e72-ba08-bd49f68600db 
tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Creating folder: Project (4a7aaf150ea243b6a38a4b14f265bd4d). Parent ref: group-v225918. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1084.014819] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8849b63e-ff97-4cb2-878d-9edf3db09075 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Lock "95ef4f91-a618-4ae2-95ad-d027c031f239" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.017s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1084.015324] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e8825da7-a1a4-44ab-bbab-71741f243373 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.028703] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Created folder: Project (4a7aaf150ea243b6a38a4b14f265bd4d) in parent group-v225918. [ 1084.029026] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Creating folder: Instances. Parent ref: group-v226167. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1084.029307] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5c3cc1b5-af68-44ee-9468-c4297362ce01 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.040244] env[63345]: DEBUG oslo_vmware.api [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017841, 'name': PowerOnVM_Task, 'duration_secs': 1.539866} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.041689] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1084.042028] env[63345]: INFO nova.compute.manager [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Took 8.76 seconds to spawn the instance on the hypervisor. 
[ 1084.042249] env[63345]: DEBUG nova.compute.manager [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1084.042665] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Created folder: Instances in parent group-v226167. [ 1084.042947] env[63345]: DEBUG oslo.service.loopingcall [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1084.043761] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57f25f7a-da69-4c45-8502-a3e832fa2faf {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.046390] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1084.046689] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bc4dc75c-d50e-4403-977a-7907947d5af8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.068905] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1084.068905] env[63345]: value = "task-1017846" [ 1084.068905] env[63345]: _type = "Task" [ 1084.068905] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.078194] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017846, 'name': CreateVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.127228] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 37f269fe-0266-4c03-9641-e6f43072657a] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1084.135982] env[63345]: DEBUG nova.compute.manager [req-1d8104b8-01a3-408b-83e6-d7c4b2ced048 req-aea74f95-63fc-4235-a7f5-18628099d8d9 service nova] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] Received event network-changed-f2837ec1-0df3-454a-bc68-fb0ca9562eb4 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1084.137212] env[63345]: DEBUG nova.compute.manager [req-1d8104b8-01a3-408b-83e6-d7c4b2ced048 req-aea74f95-63fc-4235-a7f5-18628099d8d9 service nova] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] Refreshing instance network info cache due to event network-changed-f2837ec1-0df3-454a-bc68-fb0ca9562eb4. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 1084.137212] env[63345]: DEBUG oslo_concurrency.lockutils [req-1d8104b8-01a3-408b-83e6-d7c4b2ced048 req-aea74f95-63fc-4235-a7f5-18628099d8d9 service nova] Acquiring lock "refresh_cache-148c961e-d260-4dbd-ad9f-52f94b072096" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1084.137212] env[63345]: DEBUG oslo_concurrency.lockutils [req-1d8104b8-01a3-408b-83e6-d7c4b2ced048 req-aea74f95-63fc-4235-a7f5-18628099d8d9 service nova] Acquired lock "refresh_cache-148c961e-d260-4dbd-ad9f-52f94b072096" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1084.137319] env[63345]: DEBUG nova.network.neutron [req-1d8104b8-01a3-408b-83e6-d7c4b2ced048 req-aea74f95-63fc-4235-a7f5-18628099d8d9 service nova] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] Refreshing network info cache for port f2837ec1-0df3-454a-bc68-fb0ca9562eb4 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1084.230701] env[63345]: DEBUG nova.compute.manager [None req-31ff480a-2dc5-40f0-8e04-e70d052dc4b7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Start destroying the instance on the hypervisor. {{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 1084.231169] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-31ff480a-2dc5-40f0-8e04-e70d052dc4b7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1084.232174] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7eef9116-e7b1-49e3-b499-45c1b0a7cf2e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.240997] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-31ff480a-2dc5-40f0-8e04-e70d052dc4b7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1084.241474] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4ac05410-536d-44e5-a824-a57973b9ed34 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.249129] env[63345]: DEBUG oslo_vmware.api [None req-31ff480a-2dc5-40f0-8e04-e70d052dc4b7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 1084.249129] env[63345]: value = "task-1017847" [ 1084.249129] env[63345]: _type = "Task" [ 1084.249129] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.259181] env[63345]: DEBUG oslo_vmware.api [None req-31ff480a-2dc5-40f0-8e04-e70d052dc4b7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017847, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.265880] env[63345]: DEBUG nova.compute.utils [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1084.270705] env[63345]: INFO nova.compute.manager [None req-9c0392e3-9707-4d9f-8983-046bdb199563 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: bce78147-6f6d-47a2-84f3-482f59a8bb8e] Instance disappeared during terminate [ 1084.270938] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9c0392e3-9707-4d9f-8983-046bdb199563 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lock "bce78147-6f6d-47a2-84f3-482f59a8bb8e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.266s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1084.272719] env[63345]: DEBUG nova.compute.manager [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8] Allocating IP information in the background. {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1084.273067] env[63345]: DEBUG nova.network.neutron [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1084.319545] env[63345]: DEBUG nova.policy [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '22ef13d4324a4357bcbd6fc6d755c101', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4a7aaf150ea243b6a38a4b14f265bd4d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 1084.429597] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53d22af7-b9d4-4cde-9e50-258e98a19c25 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.438942] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb025c5a-362c-4c67-97a2-dd30ebf3447d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.473292] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2de4f233-bcc1-4e6e-97b6-e66d5f2358b4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.483724] env[63345]: DEBUG oslo_vmware.service 
[-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdd05ce7-3fc0-478a-946f-fa64f3f3661f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.502739] env[63345]: DEBUG nova.compute.provider_tree [None req-905f62e0-c0fa-4c9f-ad5f-1a50c8c86d04 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1084.580182] env[63345]: INFO nova.compute.manager [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Took 18.45 seconds to build instance. [ 1084.594036] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017846, 'name': CreateVM_Task, 'duration_secs': 0.44887} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.596150] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8849b63e-ff97-4cb2-878d-9edf3db09075 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Acquiring lock "95ef4f91-a618-4ae2-95ad-d027c031f239" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1084.596915] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8849b63e-ff97-4cb2-878d-9edf3db09075 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Lock "95ef4f91-a618-4ae2-95ad-d027c031f239" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1084.596915] env[63345]: INFO nova.compute.manager [None req-8849b63e-ff97-4cb2-878d-9edf3db09075 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 95ef4f91-a618-4ae2-95ad-d027c031f239] Attaching volume eb73c317-f521-4815-9091-7618b879a9fd to /dev/sdb [ 1084.598710] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1084.600017] env[63345]: DEBUG oslo_concurrency.lockutils [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1084.600363] env[63345]: DEBUG oslo_concurrency.lockutils [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1084.600597] env[63345]: DEBUG oslo_concurrency.lockutils [None req-04cd80bb-d948-4e72-ba08-bd49f68600db 
tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1084.600901] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-77151ff0-1eae-484b-95b9-bdd603ab0481 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.608032] env[63345]: DEBUG oslo_vmware.api [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Waiting for the task: (returnval){ [ 1084.608032] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52b26a19-e419-bc92-1762-66d046d9bca5" [ 1084.608032] env[63345]: _type = "Task" [ 1084.608032] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.618205] env[63345]: DEBUG oslo_vmware.api [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52b26a19-e419-bc92-1762-66d046d9bca5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.630262] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: df2f06af-54a6-4dbd-83ff-1e4b066acbf3] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1084.636660] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a52b9700-a56f-4ffa-9c07-3468d9563109 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.647858] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4847a7ab-6867-4bc5-8a58-7fa934ecb4c9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.671613] env[63345]: DEBUG nova.virt.block_device [None req-8849b63e-ff97-4cb2-878d-9edf3db09075 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 95ef4f91-a618-4ae2-95ad-d027c031f239] Updating existing volume attachment record: 8b5c1ef5-8b3c-48aa-9756-ec1e24862464 {{(pid=63345) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1084.762090] env[63345]: DEBUG oslo_vmware.api [None req-31ff480a-2dc5-40f0-8e04-e70d052dc4b7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017847, 'name': PowerOffVM_Task, 'duration_secs': 0.177865} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.764412] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-31ff480a-2dc5-40f0-8e04-e70d052dc4b7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1084.764412] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-31ff480a-2dc5-40f0-8e04-e70d052dc4b7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1084.764412] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-924a27a3-87b9-4d61-ac10-e2be426aec1a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.773559] env[63345]: DEBUG nova.compute.manager [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8] Start building block device mappings for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 1084.796218] env[63345]: DEBUG nova.network.neutron [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8] Successfully created port: 24c75109-1060-4770-8c15-3bce8002f3e0 {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1084.898341] env[63345]: DEBUG nova.network.neutron [req-1d8104b8-01a3-408b-83e6-d7c4b2ced048 req-aea74f95-63fc-4235-a7f5-18628099d8d9 service nova] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] Updated VIF entry in instance network info cache for port f2837ec1-0df3-454a-bc68-fb0ca9562eb4. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1084.898810] env[63345]: DEBUG nova.network.neutron [req-1d8104b8-01a3-408b-83e6-d7c4b2ced048 req-aea74f95-63fc-4235-a7f5-18628099d8d9 service nova] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] Updating instance_info_cache with network_info: [{"id": "f2837ec1-0df3-454a-bc68-fb0ca9562eb4", "address": "fa:16:3e:85:d4:0e", "network": {"id": "04c13a40-3e24-45e3-b045-adb1f5b0ad03", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1754460710-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a7aaf150ea243b6a38a4b14f265bd4d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3753f451-fa23-4988-9361-074fb0bd3fd4", "external-id": "nsx-vlan-transportzone-440", "segmentation_id": 440, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf2837ec1-0d", "ovs_interfaceid": "f2837ec1-0df3-454a-bc68-fb0ca9562eb4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1085.007333] env[63345]: DEBUG nova.scheduler.client.report [None req-905f62e0-c0fa-4c9f-ad5f-1a50c8c86d04 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1085.082322] env[63345]: DEBUG oslo_concurrency.lockutils [None req-ac77230f-dc82-49c8-93fe-351ed0a0bc24 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lock "7245e83c-2dda-4b2f-8a65-07f7e4d6828a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.960s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1085.104538] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-31ff480a-2dc5-40f0-8e04-e70d052dc4b7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1085.104982] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-31ff480a-2dc5-40f0-8e04-e70d052dc4b7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Deleting contents of the VM from datastore datastore2 {{(pid=63345) 
_destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1085.105330] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-31ff480a-2dc5-40f0-8e04-e70d052dc4b7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Deleting the datastore file [datastore2] 726332dd-8699-49a4-a9ea-b9cbfc159855 {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1085.105782] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6e8ce4f2-da88-4e00-a620-15204f11dcf6 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.115243] env[63345]: DEBUG oslo_vmware.api [None req-31ff480a-2dc5-40f0-8e04-e70d052dc4b7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 1085.115243] env[63345]: value = "task-1017850" [ 1085.115243] env[63345]: _type = "Task" [ 1085.115243] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.124030] env[63345]: DEBUG oslo_vmware.api [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52b26a19-e419-bc92-1762-66d046d9bca5, 'name': SearchDatastore_Task, 'duration_secs': 0.011468} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.124923] env[63345]: DEBUG oslo_concurrency.lockutils [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1085.125202] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1085.125473] env[63345]: DEBUG oslo_concurrency.lockutils [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1085.125635] env[63345]: DEBUG oslo_concurrency.lockutils [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1085.125837] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 
tempest-ServerRescueNegativeTestJSON-736059177-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1085.126137] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c7b9129d-b2ab-42cd-adf1-edaddbc45a4f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.131366] env[63345]: DEBUG oslo_vmware.api [None req-31ff480a-2dc5-40f0-8e04-e70d052dc4b7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017850, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.133383] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: a85688b0-d68f-4370-bd95-dc9fb1d2c26a] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1085.139236] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1085.139465] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1085.140309] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-64355ec5-760e-40b5-9f9e-11457dc598de {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.146962] env[63345]: DEBUG oslo_vmware.api [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Waiting for the task: (returnval){ [ 1085.146962] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52e87541-cbd6-618a-2bdf-ff2288e0e7e9" [ 1085.146962] env[63345]: _type = "Task" [ 1085.146962] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.157223] env[63345]: DEBUG oslo_vmware.api [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52e87541-cbd6-618a-2bdf-ff2288e0e7e9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.401830] env[63345]: DEBUG oslo_concurrency.lockutils [req-1d8104b8-01a3-408b-83e6-d7c4b2ced048 req-aea74f95-63fc-4235-a7f5-18628099d8d9 service nova] Releasing lock "refresh_cache-148c961e-d260-4dbd-ad9f-52f94b072096" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1085.512752] env[63345]: DEBUG oslo_concurrency.lockutils [None req-905f62e0-c0fa-4c9f-ad5f-1a50c8c86d04 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.750s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1085.530706] env[63345]: INFO nova.scheduler.client.report [None req-905f62e0-c0fa-4c9f-ad5f-1a50c8c86d04 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Deleted allocations for instance 0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a [ 1085.639726] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 9aa651b8-317d-4153-8c33-9df0a5d16115] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1085.641665] env[63345]: DEBUG oslo_vmware.api [None req-31ff480a-2dc5-40f0-8e04-e70d052dc4b7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017850, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.246579} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.642158] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-31ff480a-2dc5-40f0-8e04-e70d052dc4b7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1085.642390] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-31ff480a-2dc5-40f0-8e04-e70d052dc4b7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1085.642616] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-31ff480a-2dc5-40f0-8e04-e70d052dc4b7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1085.642810] env[63345]: INFO nova.compute.manager [None req-31ff480a-2dc5-40f0-8e04-e70d052dc4b7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Took 1.41 seconds to destroy the instance on the hypervisor. [ 1085.643084] env[63345]: DEBUG oslo.service.loopingcall [None req-31ff480a-2dc5-40f0-8e04-e70d052dc4b7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1085.643345] env[63345]: DEBUG nova.compute.manager [-] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 1085.643450] env[63345]: DEBUG nova.network.neutron [-] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1085.659269] env[63345]: DEBUG oslo_vmware.api [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52e87541-cbd6-618a-2bdf-ff2288e0e7e9, 'name': SearchDatastore_Task, 'duration_secs': 0.010292} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.660368] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c3cc686f-d245-45d5-91c3-82a686e54886 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.667201] env[63345]: DEBUG oslo_vmware.api [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Waiting for the task: (returnval){ [ 1085.667201] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]521310de-6844-18ae-d180-b2e0f01c6c00" [ 1085.667201] env[63345]: _type = "Task" [ 1085.667201] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.677569] env[63345]: DEBUG oslo_vmware.api [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]521310de-6844-18ae-d180-b2e0f01c6c00, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.787364] env[63345]: DEBUG nova.compute.manager [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8] Start spawning the instance on the hypervisor. 
{{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 1085.815127] env[63345]: DEBUG nova.virt.hardware [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1085.815127] env[63345]: DEBUG nova.virt.hardware [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1085.815127] env[63345]: DEBUG nova.virt.hardware [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1085.815326] env[63345]: DEBUG nova.virt.hardware [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1085.815376] env[63345]: DEBUG nova.virt.hardware [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1085.815524] env[63345]: DEBUG nova.virt.hardware [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1085.815763] env[63345]: DEBUG nova.virt.hardware [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1085.815933] env[63345]: DEBUG nova.virt.hardware [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1085.816122] env[63345]: DEBUG nova.virt.hardware [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1085.816293] env[63345]: DEBUG nova.virt.hardware [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1085.816472] env[63345]: DEBUG nova.virt.hardware [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1085.817369] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef2a6b28-c937-46d1-9afd-6f4d61d5440e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.826342] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78c9521f-804a-42ed-bb02-cc73dff5ca90 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.977444] env[63345]: DEBUG nova.compute.manager [None req-44fe4ce0-3a77-4dd8-8b05-571bd5761283 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Stashing vm_state: active {{(pid=63345) _prep_resize /opt/stack/nova/nova/compute/manager.py:5953}} [ 1086.040059] env[63345]: DEBUG oslo_concurrency.lockutils [None req-905f62e0-c0fa-4c9f-ad5f-1a50c8c86d04 tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lock "0a1ae505-d6b1-4cc5-a7a1-b41ae6ba531a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.169s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1086.142628] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 11652422-9136-4453-b932-06695f9bc910] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1086.183851] env[63345]: DEBUG oslo_vmware.api [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]521310de-6844-18ae-d180-b2e0f01c6c00, 'name': SearchDatastore_Task, 'duration_secs': 0.010399} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.184281] env[63345]: DEBUG oslo_concurrency.lockutils [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1086.184657] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 148c961e-d260-4dbd-ad9f-52f94b072096/148c961e-d260-4dbd-ad9f-52f94b072096.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1086.185029] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cae07694-60a9-4c20-89b3-196ea09631e6 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.194013] env[63345]: DEBUG oslo_vmware.api [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Waiting for the task: (returnval){ [ 1086.194013] env[63345]: value = "task-1017852" [ 1086.194013] env[63345]: _type = "Task" [ 1086.194013] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.205227] env[63345]: DEBUG oslo_vmware.api [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017852, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.407936] env[63345]: DEBUG nova.compute.manager [req-181aca66-d9b3-4551-829b-f5142e76abe8 req-24ad6db6-fd32-42bf-9985-993838dbeb63 service nova] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Received event network-vif-deleted-114e38e0-a558-4242-ad5b-4aac063dcb72 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1086.409065] env[63345]: INFO nova.compute.manager [req-181aca66-d9b3-4551-829b-f5142e76abe8 req-24ad6db6-fd32-42bf-9985-993838dbeb63 service nova] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Neutron deleted interface 114e38e0-a558-4242-ad5b-4aac063dcb72; detaching it from the instance and deleting it from the info cache [ 1086.409113] env[63345]: DEBUG nova.network.neutron [req-181aca66-d9b3-4551-829b-f5142e76abe8 req-24ad6db6-fd32-42bf-9985-993838dbeb63 service nova] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1086.502517] env[63345]: DEBUG oslo_concurrency.lockutils [None req-44fe4ce0-3a77-4dd8-8b05-571bd5761283 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1086.502894] env[63345]: DEBUG oslo_concurrency.lockutils [None req-44fe4ce0-3a77-4dd8-8b05-571bd5761283 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1086.516422] env[63345]: DEBUG nova.compute.manager [req-7a673e2c-1d5b-4af7-9e14-dee445611f5d req-814b4f0a-5409-40ed-84fd-e686e18b9225 service nova] [instance: 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8] Received event network-vif-plugged-24c75109-1060-4770-8c15-3bce8002f3e0 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1086.516767] env[63345]: DEBUG oslo_concurrency.lockutils [req-7a673e2c-1d5b-4af7-9e14-dee445611f5d req-814b4f0a-5409-40ed-84fd-e686e18b9225 service nova] Acquiring lock "83ef21e9-62eb-4f0d-9c0c-a038743e0dd8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1086.517564] env[63345]: DEBUG oslo_concurrency.lockutils [req-7a673e2c-1d5b-4af7-9e14-dee445611f5d req-814b4f0a-5409-40ed-84fd-e686e18b9225 service nova] Lock "83ef21e9-62eb-4f0d-9c0c-a038743e0dd8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1086.517824] env[63345]: DEBUG oslo_concurrency.lockutils [req-7a673e2c-1d5b-4af7-9e14-dee445611f5d req-814b4f0a-5409-40ed-84fd-e686e18b9225 service nova] Lock "83ef21e9-62eb-4f0d-9c0c-a038743e0dd8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 
1086.518037] env[63345]: DEBUG nova.compute.manager [req-7a673e2c-1d5b-4af7-9e14-dee445611f5d req-814b4f0a-5409-40ed-84fd-e686e18b9225 service nova] [instance: 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8] No waiting events found dispatching network-vif-plugged-24c75109-1060-4770-8c15-3bce8002f3e0 {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1086.518314] env[63345]: WARNING nova.compute.manager [req-7a673e2c-1d5b-4af7-9e14-dee445611f5d req-814b4f0a-5409-40ed-84fd-e686e18b9225 service nova] [instance: 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8] Received unexpected event network-vif-plugged-24c75109-1060-4770-8c15-3bce8002f3e0 for instance with vm_state building and task_state spawning. [ 1086.622893] env[63345]: DEBUG nova.network.neutron [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8] Successfully updated port: 24c75109-1060-4770-8c15-3bce8002f3e0 {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1086.626409] env[63345]: DEBUG nova.network.neutron [-] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1086.649196] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 4f108dcc-c130-4c3f-840d-7a912150db3f] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1086.705059] env[63345]: DEBUG oslo_vmware.api [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017852, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.912036] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b41f9617-1497-4f51-984e-43d57cc83059 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.923076] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbeb8ebf-7ee8-478c-b3b9-eaf2abd10909 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.954653] env[63345]: DEBUG nova.compute.manager [req-181aca66-d9b3-4551-829b-f5142e76abe8 req-24ad6db6-fd32-42bf-9985-993838dbeb63 service nova] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Detach interface failed, port_id=114e38e0-a558-4242-ad5b-4aac063dcb72, reason: Instance 726332dd-8699-49a4-a9ea-b9cbfc159855 could not be found. 
{{(pid=63345) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 1087.008775] env[63345]: INFO nova.compute.claims [None req-44fe4ce0-3a77-4dd8-8b05-571bd5761283 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1087.126389] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Acquiring lock "refresh_cache-83ef21e9-62eb-4f0d-9c0c-a038743e0dd8" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1087.126575] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Acquired lock "refresh_cache-83ef21e9-62eb-4f0d-9c0c-a038743e0dd8" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1087.127983] env[63345]: DEBUG nova.network.neutron [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1087.128489] env[63345]: INFO nova.compute.manager [-] [instance: 726332dd-8699-49a4-a9ea-b9cbfc159855] Took 1.48 seconds to deallocate network for instance. [ 1087.152783] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: fe3e2b2a-9583-482e-b69b-6c130801d7db] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1087.205454] env[63345]: DEBUG oslo_vmware.api [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017852, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.542219} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.205733] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 148c961e-d260-4dbd-ad9f-52f94b072096/148c961e-d260-4dbd-ad9f-52f94b072096.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1087.205957] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1087.206244] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7a8c027e-9d79-4976-a6d5-4064447f5a3f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.214555] env[63345]: DEBUG oslo_vmware.api [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Waiting for the task: (returnval){ [ 1087.214555] env[63345]: value = "task-1017854" [ 1087.214555] env[63345]: _type = "Task" [ 1087.214555] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.225518] env[63345]: DEBUG oslo_vmware.api [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017854, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.515491] env[63345]: INFO nova.compute.resource_tracker [None req-44fe4ce0-3a77-4dd8-8b05-571bd5761283 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Updating resource usage from migration 338a27f0-4a04-4b10-9b24-9ad29ee1e81a [ 1087.639185] env[63345]: DEBUG oslo_concurrency.lockutils [None req-31ff480a-2dc5-40f0-8e04-e70d052dc4b7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1087.644534] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81e0b612-cb3a-4325-b3c3-1f686617818b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.653331] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7abb75a-7cdb-4307-8b32-b2798d2ec29d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.656689] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: cb712d80-be78-4c19-a891-329011521f30] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1087.687830] env[63345]: DEBUG nova.network.neutron [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1087.690326] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfd47ccd-9ba5-43e4-9bd1-934cebb5d3db {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.701547] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20d67c11-d79c-426b-946f-fd58a9f01301 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.719855] env[63345]: DEBUG nova.compute.provider_tree [None req-44fe4ce0-3a77-4dd8-8b05-571bd5761283 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1087.730120] env[63345]: DEBUG oslo_vmware.api [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017854, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.090779} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.731641] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1087.731946] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47ca3996-c3a8-4631-b38b-d444d6d756ad {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.756085] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] Reconfiguring VM instance instance-0000006f to attach disk [datastore2] 148c961e-d260-4dbd-ad9f-52f94b072096/148c961e-d260-4dbd-ad9f-52f94b072096.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1087.759036] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dca37a3a-895b-4e7e-96d2-ef03f1d4b714 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.781207] env[63345]: DEBUG oslo_vmware.api [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Waiting for the task: (returnval){ [ 1087.781207] env[63345]: value = "task-1017855" [ 1087.781207] env[63345]: _type = "Task" [ 1087.781207] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.790217] env[63345]: DEBUG oslo_vmware.api [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017855, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.882360] env[63345]: DEBUG nova.network.neutron [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8] Updating instance_info_cache with network_info: [{"id": "24c75109-1060-4770-8c15-3bce8002f3e0", "address": "fa:16:3e:56:92:00", "network": {"id": "04c13a40-3e24-45e3-b045-adb1f5b0ad03", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1754460710-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a7aaf150ea243b6a38a4b14f265bd4d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3753f451-fa23-4988-9361-074fb0bd3fd4", "external-id": "nsx-vlan-transportzone-440", "segmentation_id": 440, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24c75109-10", "ovs_interfaceid": "24c75109-1060-4770-8c15-3bce8002f3e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1088.160606] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 00c58889-75f7-4a4b-a5a3-a45723c1f495] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1088.226127] env[63345]: DEBUG nova.scheduler.client.report [None req-44fe4ce0-3a77-4dd8-8b05-571bd5761283 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1088.293848] env[63345]: DEBUG oslo_vmware.api [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017855, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.384787] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Releasing lock "refresh_cache-83ef21e9-62eb-4f0d-9c0c-a038743e0dd8" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1088.385187] env[63345]: DEBUG nova.compute.manager [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8] Instance network_info: |[{"id": "24c75109-1060-4770-8c15-3bce8002f3e0", "address": "fa:16:3e:56:92:00", "network": {"id": "04c13a40-3e24-45e3-b045-adb1f5b0ad03", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1754460710-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a7aaf150ea243b6a38a4b14f265bd4d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3753f451-fa23-4988-9361-074fb0bd3fd4", "external-id": "nsx-vlan-transportzone-440", "segmentation_id": 440, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24c75109-10", "ovs_interfaceid": "24c75109-1060-4770-8c15-3bce8002f3e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 1088.385652] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:56:92:00', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3753f451-fa23-4988-9361-074fb0bd3fd4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '24c75109-1060-4770-8c15-3bce8002f3e0', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1088.393450] env[63345]: DEBUG oslo.service.loopingcall [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1088.393694] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1088.393933] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-83dad0dd-87b7-42af-96ae-a89b4cf1f612 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.415464] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1088.415464] env[63345]: value = "task-1017856" [ 1088.415464] env[63345]: _type = "Task" [ 1088.415464] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.424366] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017856, 'name': CreateVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.555146] env[63345]: DEBUG nova.compute.manager [req-72859782-4b87-498d-a8cd-23f5c0488cd5 req-fabe92e7-c4e4-449e-9cfe-91a7ebafc8fb service nova] [instance: 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8] Received event network-changed-24c75109-1060-4770-8c15-3bce8002f3e0 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1088.555368] env[63345]: DEBUG nova.compute.manager [req-72859782-4b87-498d-a8cd-23f5c0488cd5 req-fabe92e7-c4e4-449e-9cfe-91a7ebafc8fb service nova] [instance: 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8] Refreshing instance network info cache due to event network-changed-24c75109-1060-4770-8c15-3bce8002f3e0. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 1088.555601] env[63345]: DEBUG oslo_concurrency.lockutils [req-72859782-4b87-498d-a8cd-23f5c0488cd5 req-fabe92e7-c4e4-449e-9cfe-91a7ebafc8fb service nova] Acquiring lock "refresh_cache-83ef21e9-62eb-4f0d-9c0c-a038743e0dd8" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1088.555758] env[63345]: DEBUG oslo_concurrency.lockutils [req-72859782-4b87-498d-a8cd-23f5c0488cd5 req-fabe92e7-c4e4-449e-9cfe-91a7ebafc8fb service nova] Acquired lock "refresh_cache-83ef21e9-62eb-4f0d-9c0c-a038743e0dd8" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1088.555929] env[63345]: DEBUG nova.network.neutron [req-72859782-4b87-498d-a8cd-23f5c0488cd5 req-fabe92e7-c4e4-449e-9cfe-91a7ebafc8fb service nova] [instance: 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8] Refreshing network info cache for port 24c75109-1060-4770-8c15-3bce8002f3e0 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1088.664713] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 02eb493e-d1a1-4461-8e3f-e493e96fe058] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1088.730781] env[63345]: DEBUG oslo_concurrency.lockutils [None req-44fe4ce0-3a77-4dd8-8b05-571bd5761283 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.228s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1088.731022] env[63345]: INFO nova.compute.manager [None req-44fe4ce0-3a77-4dd8-8b05-571bd5761283 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Migrating [ 1088.740819] env[63345]: DEBUG oslo_concurrency.lockutils [None req-31ff480a-2dc5-40f0-8e04-e70d052dc4b7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.102s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1088.741196] env[63345]: DEBUG oslo_concurrency.lockutils [None req-31ff480a-2dc5-40f0-8e04-e70d052dc4b7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1088.767281] env[63345]: INFO nova.scheduler.client.report [None req-31ff480a-2dc5-40f0-8e04-e70d052dc4b7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Deleted allocations for instance 726332dd-8699-49a4-a9ea-b9cbfc159855 [ 1088.793664] env[63345]: DEBUG oslo_vmware.api [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017855, 'name': ReconfigVM_Task, 'duration_secs': 0.560595} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.793995] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] Reconfigured VM instance instance-0000006f to attach disk [datastore2] 148c961e-d260-4dbd-ad9f-52f94b072096/148c961e-d260-4dbd-ad9f-52f94b072096.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1088.794674] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e9037b54-547c-4e93-8f21-a53be90700cf {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.803501] env[63345]: DEBUG oslo_vmware.api [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Waiting for the task: (returnval){ [ 1088.803501] env[63345]: value = "task-1017857" [ 1088.803501] env[63345]: _type = "Task" [ 1088.803501] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.813403] env[63345]: DEBUG oslo_vmware.api [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017857, 'name': Rename_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.926068] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017856, 'name': CreateVM_Task, 'duration_secs': 0.36579} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.926281] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1088.926984] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1088.927212] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1088.927576] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1088.927871] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e32e59eb-d4ef-44eb-ade9-8a09286e8e18 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.933098] env[63345]: DEBUG oslo_vmware.api [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Waiting for the task: (returnval){ [ 1088.933098] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]529cf259-625c-1b84-a6cc-a7a2fc291040" [ 1088.933098] env[63345]: _type = "Task" [ 1088.933098] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.941825] env[63345]: DEBUG oslo_vmware.api [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]529cf259-625c-1b84-a6cc-a7a2fc291040, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.168619] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: a9b69d13-6330-4f9b-b8e1-1c0017655f9f] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1089.223657] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-8849b63e-ff97-4cb2-878d-9edf3db09075 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 95ef4f91-a618-4ae2-95ad-d027c031f239] Volume attach. 
Driver type: vmdk {{(pid=63345) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1089.223926] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-8849b63e-ff97-4cb2-878d-9edf3db09075 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 95ef4f91-a618-4ae2-95ad-d027c031f239] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-226170', 'volume_id': 'eb73c317-f521-4815-9091-7618b879a9fd', 'name': 'volume-eb73c317-f521-4815-9091-7618b879a9fd', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '95ef4f91-a618-4ae2-95ad-d027c031f239', 'attached_at': '', 'detached_at': '', 'volume_id': 'eb73c317-f521-4815-9091-7618b879a9fd', 'serial': 'eb73c317-f521-4815-9091-7618b879a9fd'} {{(pid=63345) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1089.225125] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d24b53e8-1699-4b8a-bb1f-3443d5d32aa3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.242664] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ad3f6e8-40a0-482a-93d7-ec324a3348d5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.260620] env[63345]: DEBUG oslo_concurrency.lockutils [None req-44fe4ce0-3a77-4dd8-8b05-571bd5761283 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquiring lock "refresh_cache-7245e83c-2dda-4b2f-8a65-07f7e4d6828a" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1089.260821] env[63345]: DEBUG oslo_concurrency.lockutils [None req-44fe4ce0-3a77-4dd8-8b05-571bd5761283 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquired lock "refresh_cache-7245e83c-2dda-4b2f-8a65-07f7e4d6828a" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1089.261084] env[63345]: DEBUG nova.network.neutron [None req-44fe4ce0-3a77-4dd8-8b05-571bd5761283 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1089.269713] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-8849b63e-ff97-4cb2-878d-9edf3db09075 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 95ef4f91-a618-4ae2-95ad-d027c031f239] Reconfiguring VM instance instance-0000006a to attach disk [datastore2] volume-eb73c317-f521-4815-9091-7618b879a9fd/volume-eb73c317-f521-4815-9091-7618b879a9fd.vmdk or device None with type thin {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1089.272908] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d0b46131-7468-4cd8-87fd-1ce514f1d79e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.288101] env[63345]: DEBUG oslo_concurrency.lockutils [None req-31ff480a-2dc5-40f0-8e04-e70d052dc4b7 
tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lock "726332dd-8699-49a4-a9ea-b9cbfc159855" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.567s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1089.295960] env[63345]: DEBUG oslo_vmware.api [None req-8849b63e-ff97-4cb2-878d-9edf3db09075 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Waiting for the task: (returnval){ [ 1089.295960] env[63345]: value = "task-1017858" [ 1089.295960] env[63345]: _type = "Task" [ 1089.295960] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.305417] env[63345]: DEBUG oslo_vmware.api [None req-8849b63e-ff97-4cb2-878d-9edf3db09075 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017858, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.314888] env[63345]: DEBUG oslo_vmware.api [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017857, 'name': Rename_Task, 'duration_secs': 0.255285} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.315199] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1089.315456] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3c947107-7fc6-42c1-a560-8e0baae16127 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.322335] env[63345]: DEBUG oslo_vmware.api [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Waiting for the task: (returnval){ [ 1089.322335] env[63345]: value = "task-1017859" [ 1089.322335] env[63345]: _type = "Task" [ 1089.322335] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.330900] env[63345]: DEBUG oslo_vmware.api [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017859, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.447373] env[63345]: DEBUG oslo_vmware.api [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]529cf259-625c-1b84-a6cc-a7a2fc291040, 'name': SearchDatastore_Task, 'duration_secs': 0.009625} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.447750] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1089.448170] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1089.448392] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1089.448592] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1089.448805] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1089.450126] env[63345]: DEBUG nova.network.neutron [req-72859782-4b87-498d-a8cd-23f5c0488cd5 req-fabe92e7-c4e4-449e-9cfe-91a7ebafc8fb service nova] [instance: 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8] Updated VIF entry in instance network info cache for port 24c75109-1060-4770-8c15-3bce8002f3e0. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1089.450579] env[63345]: DEBUG nova.network.neutron [req-72859782-4b87-498d-a8cd-23f5c0488cd5 req-fabe92e7-c4e4-449e-9cfe-91a7ebafc8fb service nova] [instance: 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8] Updating instance_info_cache with network_info: [{"id": "24c75109-1060-4770-8c15-3bce8002f3e0", "address": "fa:16:3e:56:92:00", "network": {"id": "04c13a40-3e24-45e3-b045-adb1f5b0ad03", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1754460710-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a7aaf150ea243b6a38a4b14f265bd4d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3753f451-fa23-4988-9361-074fb0bd3fd4", "external-id": "nsx-vlan-transportzone-440", "segmentation_id": 440, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24c75109-10", "ovs_interfaceid": "24c75109-1060-4770-8c15-3bce8002f3e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1089.451866] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4b5e87f1-7426-4434-83ce-417d47518af9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.462271] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1089.462474] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1089.463379] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2f7aff30-22bb-4a43-a47f-2331d641a36b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.470184] env[63345]: DEBUG oslo_vmware.api [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Waiting for the task: (returnval){ [ 1089.470184] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]5298f153-8dc0-b802-1404-44837135b05d" [ 1089.470184] env[63345]: _type = "Task" [ 1089.470184] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.480407] env[63345]: DEBUG oslo_vmware.api [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5298f153-8dc0-b802-1404-44837135b05d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.545155] env[63345]: DEBUG oslo_concurrency.lockutils [None req-161c761c-81c3-453c-a23d-4392f1ef777e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquiring lock "869f8110-6490-4a47-955a-0ce085f826af" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1089.545456] env[63345]: DEBUG oslo_concurrency.lockutils [None req-161c761c-81c3-453c-a23d-4392f1ef777e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lock "869f8110-6490-4a47-955a-0ce085f826af" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1089.545692] env[63345]: DEBUG oslo_concurrency.lockutils [None req-161c761c-81c3-453c-a23d-4392f1ef777e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquiring lock "869f8110-6490-4a47-955a-0ce085f826af-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1089.545880] env[63345]: DEBUG oslo_concurrency.lockutils [None req-161c761c-81c3-453c-a23d-4392f1ef777e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lock "869f8110-6490-4a47-955a-0ce085f826af-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1089.546072] env[63345]: DEBUG oslo_concurrency.lockutils [None req-161c761c-81c3-453c-a23d-4392f1ef777e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lock "869f8110-6490-4a47-955a-0ce085f826af-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1089.548851] env[63345]: INFO nova.compute.manager [None req-161c761c-81c3-453c-a23d-4392f1ef777e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 869f8110-6490-4a47-955a-0ce085f826af] Terminating instance [ 1089.671981] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: bcec23fe-75c7-479e-9210-85ca6781d7e5] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1089.807198] env[63345]: DEBUG oslo_vmware.api [None req-8849b63e-ff97-4cb2-878d-9edf3db09075 
tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017858, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.832939] env[63345]: DEBUG oslo_vmware.api [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017859, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.956151] env[63345]: DEBUG oslo_concurrency.lockutils [req-72859782-4b87-498d-a8cd-23f5c0488cd5 req-fabe92e7-c4e4-449e-9cfe-91a7ebafc8fb service nova] Releasing lock "refresh_cache-83ef21e9-62eb-4f0d-9c0c-a038743e0dd8" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1089.982859] env[63345]: DEBUG oslo_vmware.api [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5298f153-8dc0-b802-1404-44837135b05d, 'name': SearchDatastore_Task, 'duration_secs': 0.009924} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.983491] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fd892805-2412-4972-8ef7-e92c3ca56d09 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.989220] env[63345]: DEBUG oslo_vmware.api [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Waiting for the task: (returnval){ [ 1089.989220] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]5274599d-b3f0-8062-9baa-aa0fc846d2ea" [ 1089.989220] env[63345]: _type = "Task" [ 1089.989220] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.998552] env[63345]: DEBUG oslo_vmware.api [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5274599d-b3f0-8062-9baa-aa0fc846d2ea, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.052514] env[63345]: DEBUG nova.compute.manager [None req-161c761c-81c3-453c-a23d-4392f1ef777e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 869f8110-6490-4a47-955a-0ce085f826af] Start destroying the instance on the hypervisor. 
{{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 1090.055076] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-161c761c-81c3-453c-a23d-4392f1ef777e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 869f8110-6490-4a47-955a-0ce085f826af] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1090.055076] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a649aff-7bb5-431c-aefa-2aa45cc18baa {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.063064] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-161c761c-81c3-453c-a23d-4392f1ef777e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 869f8110-6490-4a47-955a-0ce085f826af] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1090.063471] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d14e7418-b56e-4d23-b921-d659cad391df {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.072034] env[63345]: DEBUG oslo_vmware.api [None req-161c761c-81c3-453c-a23d-4392f1ef777e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 1090.072034] env[63345]: value = "task-1017860" [ 1090.072034] env[63345]: _type = "Task" [ 1090.072034] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.083298] env[63345]: DEBUG oslo_vmware.api [None req-161c761c-81c3-453c-a23d-4392f1ef777e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017860, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.175092] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: b5173471-3367-42ba-b450-62ad8573f048] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1090.187802] env[63345]: DEBUG oslo_concurrency.lockutils [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Acquiring lock "5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1090.187802] env[63345]: DEBUG oslo_concurrency.lockutils [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lock "5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1090.302153] env[63345]: DEBUG nova.network.neutron [None req-44fe4ce0-3a77-4dd8-8b05-571bd5761283 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Updating instance_info_cache with network_info: [{"id": "5709e20d-8dfc-41ae-981d-01de437144e3", "address": "fa:16:3e:43:47:ce", "network": {"id": "d7581fd9-99cb-4847-b9da-a659a40e1d52", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1100696493-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c535ae9067ab4e8a87e95c68af4624fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f77ff7a1-209c-4f3f-b2a0-fd817741e739", "external-id": "nsx-vlan-transportzone-935", "segmentation_id": 935, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5709e20d-8d", "ovs_interfaceid": "5709e20d-8dfc-41ae-981d-01de437144e3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1090.309623] env[63345]: DEBUG oslo_vmware.api [None req-8849b63e-ff97-4cb2-878d-9edf3db09075 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017858, 'name': ReconfigVM_Task, 'duration_secs': 0.566327} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.309964] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-8849b63e-ff97-4cb2-878d-9edf3db09075 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 95ef4f91-a618-4ae2-95ad-d027c031f239] Reconfigured VM instance instance-0000006a to attach disk [datastore2] volume-eb73c317-f521-4815-9091-7618b879a9fd/volume-eb73c317-f521-4815-9091-7618b879a9fd.vmdk or device None with type thin {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1090.315935] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-907e580b-456e-45bc-983e-c471018d9492 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.338767] env[63345]: DEBUG oslo_vmware.api [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017859, 'name': PowerOnVM_Task, 'duration_secs': 0.608277} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.340256] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1090.340508] env[63345]: INFO nova.compute.manager [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] Took 8.60 seconds to spawn the instance on the hypervisor. [ 1090.340677] env[63345]: DEBUG nova.compute.manager [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1090.341033] env[63345]: DEBUG oslo_vmware.api [None req-8849b63e-ff97-4cb2-878d-9edf3db09075 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Waiting for the task: (returnval){ [ 1090.341033] env[63345]: value = "task-1017861" [ 1090.341033] env[63345]: _type = "Task" [ 1090.341033] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.341788] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a55cdfcf-1668-47c9-87da-a33d15946df5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.358048] env[63345]: DEBUG oslo_vmware.api [None req-8849b63e-ff97-4cb2-878d-9edf3db09075 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017861, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.501518] env[63345]: DEBUG oslo_vmware.api [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5274599d-b3f0-8062-9baa-aa0fc846d2ea, 'name': SearchDatastore_Task, 'duration_secs': 0.010197} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.501807] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1090.502092] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8/83ef21e9-62eb-4f0d-9c0c-a038743e0dd8.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1090.502371] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d305987a-4106-47d4-a656-f1a412f13006 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.511268] env[63345]: DEBUG oslo_vmware.api [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Waiting for the task: (returnval){ [ 1090.511268] env[63345]: value = "task-1017862" [ 1090.511268] env[63345]: _type = "Task" [ 1090.511268] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.520447] env[63345]: DEBUG oslo_vmware.api [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017862, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.586788] env[63345]: DEBUG oslo_vmware.api [None req-161c761c-81c3-453c-a23d-4392f1ef777e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017860, 'name': PowerOffVM_Task, 'duration_secs': 0.219501} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.587129] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-161c761c-81c3-453c-a23d-4392f1ef777e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 869f8110-6490-4a47-955a-0ce085f826af] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1090.587310] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-161c761c-81c3-453c-a23d-4392f1ef777e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 869f8110-6490-4a47-955a-0ce085f826af] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1090.587573] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b1f9602f-73f2-4626-9583-dde1b702ff40 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.678839] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 3a85df04-3997-48a3-8992-f24fe997b3cc] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1090.688323] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-161c761c-81c3-453c-a23d-4392f1ef777e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 869f8110-6490-4a47-955a-0ce085f826af] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1090.688604] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-161c761c-81c3-453c-a23d-4392f1ef777e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 869f8110-6490-4a47-955a-0ce085f826af] Deleting contents of the VM from datastore datastore1 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1090.689198] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-161c761c-81c3-453c-a23d-4392f1ef777e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Deleting the datastore file [datastore1] 869f8110-6490-4a47-955a-0ce085f826af {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1090.689198] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d89a2180-6bce-404a-af91-dfa6622b3ae1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.691451] env[63345]: DEBUG nova.compute.manager [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 1090.701719] env[63345]: DEBUG oslo_vmware.api [None req-161c761c-81c3-453c-a23d-4392f1ef777e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for the task: (returnval){ [ 1090.701719] env[63345]: value = "task-1017864" [ 1090.701719] env[63345]: _type = "Task" [ 1090.701719] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.714372] env[63345]: DEBUG oslo_vmware.api [None req-161c761c-81c3-453c-a23d-4392f1ef777e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017864, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.804818] env[63345]: DEBUG oslo_concurrency.lockutils [None req-44fe4ce0-3a77-4dd8-8b05-571bd5761283 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Releasing lock "refresh_cache-7245e83c-2dda-4b2f-8a65-07f7e4d6828a" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1090.855957] env[63345]: DEBUG oslo_vmware.api [None req-8849b63e-ff97-4cb2-878d-9edf3db09075 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017861, 'name': ReconfigVM_Task, 'duration_secs': 0.23324} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.856430] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-8849b63e-ff97-4cb2-878d-9edf3db09075 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 95ef4f91-a618-4ae2-95ad-d027c031f239] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-226170', 'volume_id': 'eb73c317-f521-4815-9091-7618b879a9fd', 'name': 'volume-eb73c317-f521-4815-9091-7618b879a9fd', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '95ef4f91-a618-4ae2-95ad-d027c031f239', 'attached_at': '', 'detached_at': '', 'volume_id': 'eb73c317-f521-4815-9091-7618b879a9fd', 'serial': 'eb73c317-f521-4815-9091-7618b879a9fd'} {{(pid=63345) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1090.866186] env[63345]: INFO nova.compute.manager [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] Took 21.09 seconds to build instance. [ 1091.021811] env[63345]: DEBUG oslo_vmware.api [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017862, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.487194} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.022099] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8/83ef21e9-62eb-4f0d-9c0c-a038743e0dd8.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1091.022342] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1091.022609] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d4df90b1-403b-4305-b3cb-ca64139d36b7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.030663] env[63345]: DEBUG oslo_vmware.api [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Waiting for the task: (returnval){ [ 1091.030663] env[63345]: value = "task-1017865" [ 1091.030663] env[63345]: _type = "Task" [ 1091.030663] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.042327] env[63345]: DEBUG oslo_vmware.api [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017865, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.186858] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: e3d52cbd-e768-4425-b83e-180a6e58fd00] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1091.212580] env[63345]: DEBUG oslo_vmware.api [None req-161c761c-81c3-453c-a23d-4392f1ef777e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Task: {'id': task-1017864, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.314555} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.212877] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-161c761c-81c3-453c-a23d-4392f1ef777e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1091.213126] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-161c761c-81c3-453c-a23d-4392f1ef777e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 869f8110-6490-4a47-955a-0ce085f826af] Deleted contents of the VM from datastore datastore1 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1091.213271] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-161c761c-81c3-453c-a23d-4392f1ef777e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 869f8110-6490-4a47-955a-0ce085f826af] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1091.213475] env[63345]: INFO nova.compute.manager [None req-161c761c-81c3-453c-a23d-4392f1ef777e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] [instance: 869f8110-6490-4a47-955a-0ce085f826af] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1091.213751] env[63345]: DEBUG oslo.service.loopingcall [None req-161c761c-81c3-453c-a23d-4392f1ef777e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1091.214788] env[63345]: DEBUG oslo_concurrency.lockutils [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1091.215047] env[63345]: DEBUG oslo_concurrency.lockutils [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1091.216574] env[63345]: INFO nova.compute.claims [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1091.219028] env[63345]: DEBUG nova.compute.manager [-] [instance: 869f8110-6490-4a47-955a-0ce085f826af] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 1091.219131] env[63345]: DEBUG nova.network.neutron [-] [instance: 869f8110-6490-4a47-955a-0ce085f826af] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1091.368968] env[63345]: DEBUG oslo_concurrency.lockutils [None req-04cd80bb-d948-4e72-ba08-bd49f68600db tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Lock "148c961e-d260-4dbd-ad9f-52f94b072096" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.605s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1091.464732] env[63345]: DEBUG nova.compute.manager [req-6abe125c-e9f0-409f-9c7a-d234dea8bfe5 req-05f7aca0-89c0-466b-8101-bdc2395d8825 service nova] [instance: 869f8110-6490-4a47-955a-0ce085f826af] Received event network-vif-deleted-9b0555db-b627-44ae-8812-42415d554cde {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1091.464979] env[63345]: INFO nova.compute.manager [req-6abe125c-e9f0-409f-9c7a-d234dea8bfe5 req-05f7aca0-89c0-466b-8101-bdc2395d8825 service nova] [instance: 869f8110-6490-4a47-955a-0ce085f826af] Neutron deleted interface 9b0555db-b627-44ae-8812-42415d554cde; detaching it from the instance and deleting it from the info cache [ 1091.465196] env[63345]: DEBUG nova.network.neutron [req-6abe125c-e9f0-409f-9c7a-d234dea8bfe5 req-05f7aca0-89c0-466b-8101-bdc2395d8825 service nova] [instance: 869f8110-6490-4a47-955a-0ce085f826af] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1091.541655] env[63345]: DEBUG oslo_vmware.api [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017865, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079043} completed 
successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.541949] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1091.542769] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-865afb3f-fbfd-477a-a75b-0afce5d77c73 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.566508] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8] Reconfiguring VM instance instance-00000070 to attach disk [datastore2] 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8/83ef21e9-62eb-4f0d-9c0c-a038743e0dd8.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1091.566849] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-514b3221-0180-43f7-a620-d7d05da504b6 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.590380] env[63345]: DEBUG oslo_vmware.api [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Waiting for the task: (returnval){ [ 1091.590380] env[63345]: value = "task-1017866" [ 1091.590380] env[63345]: _type = "Task" [ 1091.590380] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.604222] env[63345]: DEBUG oslo_vmware.api [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017866, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.690565] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 93112cc1-f9a1-4188-9555-bddf483426a1] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1091.784552] env[63345]: DEBUG oslo_vmware.rw_handles [None req-f57ef451-7da6-43fb-b0da-0f1d9907f2f8 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52eb4aa8-fa46-ecee-a4f6-9877ae9571c6/disk-0.vmdk. 
{{(pid=63345) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1091.785570] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d5c7b15-faed-4aaa-86fd-66b234eff796 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.792848] env[63345]: DEBUG oslo_vmware.rw_handles [None req-f57ef451-7da6-43fb-b0da-0f1d9907f2f8 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52eb4aa8-fa46-ecee-a4f6-9877ae9571c6/disk-0.vmdk is in state: ready. {{(pid=63345) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1091.793039] env[63345]: ERROR oslo_vmware.rw_handles [None req-f57ef451-7da6-43fb-b0da-0f1d9907f2f8 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52eb4aa8-fa46-ecee-a4f6-9877ae9571c6/disk-0.vmdk due to incomplete transfer. [ 1091.793267] env[63345]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-05d9a96b-4499-4dc0-8688-227bdf794f9d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.801837] env[63345]: DEBUG oslo_vmware.rw_handles [None req-f57ef451-7da6-43fb-b0da-0f1d9907f2f8 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52eb4aa8-fa46-ecee-a4f6-9877ae9571c6/disk-0.vmdk. {{(pid=63345) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1091.802056] env[63345]: DEBUG nova.virt.vmwareapi.images [None req-f57ef451-7da6-43fb-b0da-0f1d9907f2f8 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Uploaded image 00f54121-1c47-489a-9345-a57300eace29 to the Glance image server {{(pid=63345) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:473}} [ 1091.804539] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-f57ef451-7da6-43fb-b0da-0f1d9907f2f8 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Destroying the VM {{(pid=63345) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 1091.804736] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-11534319-f29d-45a6-b071-424c25f1ecba {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.814842] env[63345]: DEBUG oslo_vmware.api [None req-f57ef451-7da6-43fb-b0da-0f1d9907f2f8 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Waiting for the task: (returnval){ [ 1091.814842] env[63345]: value = "task-1017867" [ 1091.814842] env[63345]: _type = "Task" [ 1091.814842] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.824191] env[63345]: DEBUG oslo_vmware.api [None req-f57ef451-7da6-43fb-b0da-0f1d9907f2f8 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017867, 'name': Destroy_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.896886] env[63345]: DEBUG nova.objects.instance [None req-8849b63e-ff97-4cb2-878d-9edf3db09075 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Lazy-loading 'flavor' on Instance uuid 95ef4f91-a618-4ae2-95ad-d027c031f239 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1091.936832] env[63345]: DEBUG nova.network.neutron [-] [instance: 869f8110-6490-4a47-955a-0ce085f826af] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1091.967586] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-061a2da4-720c-4e7e-aa1a-d540569782ea {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.978118] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87813ff2-1780-4d7b-85c9-2d020903cd9a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.008243] env[63345]: DEBUG nova.compute.manager [req-6abe125c-e9f0-409f-9c7a-d234dea8bfe5 req-05f7aca0-89c0-466b-8101-bdc2395d8825 service nova] [instance: 869f8110-6490-4a47-955a-0ce085f826af] Detach interface failed, port_id=9b0555db-b627-44ae-8812-42415d554cde, reason: Instance 869f8110-6490-4a47-955a-0ce085f826af could not be found. {{(pid=63345) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 1092.100672] env[63345]: DEBUG oslo_vmware.api [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017866, 'name': ReconfigVM_Task, 'duration_secs': 0.384196} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.100996] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8] Reconfigured VM instance instance-00000070 to attach disk [datastore2] 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8/83ef21e9-62eb-4f0d-9c0c-a038743e0dd8.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1092.101776] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-efcdfd29-46a7-409f-969b-66c555ed6f53 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.112282] env[63345]: DEBUG oslo_vmware.api [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Waiting for the task: (returnval){ [ 1092.112282] env[63345]: value = "task-1017868" [ 1092.112282] env[63345]: _type = "Task" [ 1092.112282] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.124860] env[63345]: DEBUG oslo_vmware.api [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017868, 'name': Rename_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.193930] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 778faa4f-4c5f-4ec2-b17b-5d7513c9c218] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1092.324749] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c395c240-2d23-4e5d-98a6-c7d69a6ce7dd {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.333628] env[63345]: DEBUG oslo_vmware.api [None req-f57ef451-7da6-43fb-b0da-0f1d9907f2f8 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017867, 'name': Destroy_Task, 'duration_secs': 0.388607} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.346916] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-f57ef451-7da6-43fb-b0da-0f1d9907f2f8 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Destroyed the VM [ 1092.347208] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-f57ef451-7da6-43fb-b0da-0f1d9907f2f8 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Deleting Snapshot of the VM instance {{(pid=63345) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1092.347924] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-44fe4ce0-3a77-4dd8-8b05-571bd5761283 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Updating instance '7245e83c-2dda-4b2f-8a65-07f7e4d6828a' progress to 0 {{(pid=63345) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1092.353464] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-5da5caea-a2ca-425e-ab32-19ae0de56956 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.362169] env[63345]: DEBUG oslo_vmware.api [None req-f57ef451-7da6-43fb-b0da-0f1d9907f2f8 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Waiting for the task: (returnval){ [ 1092.362169] env[63345]: value = "task-1017869" [ 1092.362169] env[63345]: _type = "Task" [ 1092.362169] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.367927] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ae511cc-ef07-48e3-9b8b-6b2e9311cd16 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.375818] env[63345]: DEBUG oslo_vmware.api [None req-f57ef451-7da6-43fb-b0da-0f1d9907f2f8 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017869, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.381063] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c095633-af2d-4177-b7e1-5ef078ed4c0e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.418429] env[63345]: DEBUG oslo_concurrency.lockutils [None req-8849b63e-ff97-4cb2-878d-9edf3db09075 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Lock "95ef4f91-a618-4ae2-95ad-d027c031f239" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.822s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1092.421255] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcf663df-8e7c-4cdd-bfd0-ec817ace8eea {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.432083] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b1bd2d8-5e5a-49c5-adde-d99657278d1c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.439541] env[63345]: INFO nova.compute.manager [-] [instance: 869f8110-6490-4a47-955a-0ce085f826af] Took 1.22 seconds to deallocate network for instance. [ 1092.449957] env[63345]: DEBUG nova.compute.provider_tree [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1092.620286] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4f1133eb-2615-4f5b-9232-e78b5899b63e tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Acquiring lock "95ef4f91-a618-4ae2-95ad-d027c031f239" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1092.620655] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4f1133eb-2615-4f5b-9232-e78b5899b63e tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Lock "95ef4f91-a618-4ae2-95ad-d027c031f239" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1092.628766] env[63345]: DEBUG oslo_vmware.api [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017868, 'name': Rename_Task, 'duration_secs': 0.148156} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.629103] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1092.629412] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8d2b5833-646c-4796-aa10-c379058ae754 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.641224] env[63345]: DEBUG oslo_vmware.api [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Waiting for the task: (returnval){ [ 1092.641224] env[63345]: value = "task-1017870" [ 1092.641224] env[63345]: _type = "Task" [ 1092.641224] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.652809] env[63345]: DEBUG oslo_vmware.api [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017870, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.697162] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 6cbe136b-5bf6-4f17-bcef-b712d850615f] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1092.857851] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-44fe4ce0-3a77-4dd8-8b05-571bd5761283 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1092.858532] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-856b732f-007d-4bd7-93ee-ef8c1c4187e9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.867049] env[63345]: DEBUG oslo_vmware.api [None req-44fe4ce0-3a77-4dd8-8b05-571bd5761283 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for the task: (returnval){ [ 1092.867049] env[63345]: value = "task-1017871" [ 1092.867049] env[63345]: _type = "Task" [ 1092.867049] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.874697] env[63345]: DEBUG oslo_vmware.api [None req-f57ef451-7da6-43fb-b0da-0f1d9907f2f8 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017869, 'name': RemoveSnapshot_Task, 'duration_secs': 0.368823} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.875350] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-f57ef451-7da6-43fb-b0da-0f1d9907f2f8 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Deleted Snapshot of the VM instance {{(pid=63345) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1092.875647] env[63345]: DEBUG nova.compute.manager [None req-f57ef451-7da6-43fb-b0da-0f1d9907f2f8 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1092.876432] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33ca5c50-c907-4aa9-8ce4-a1a0b63fc1ff {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.881983] env[63345]: DEBUG oslo_vmware.api [None req-44fe4ce0-3a77-4dd8-8b05-571bd5761283 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017871, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.955579] env[63345]: DEBUG nova.scheduler.client.report [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1092.959520] env[63345]: DEBUG oslo_concurrency.lockutils [None req-161c761c-81c3-453c-a23d-4392f1ef777e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1093.124585] env[63345]: INFO nova.compute.manager [None req-4f1133eb-2615-4f5b-9232-e78b5899b63e tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 95ef4f91-a618-4ae2-95ad-d027c031f239] Detaching volume eb73c317-f521-4815-9091-7618b879a9fd [ 1093.151927] env[63345]: DEBUG oslo_vmware.api [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017870, 'name': PowerOnVM_Task, 'duration_secs': 0.459057} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.152236] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1093.152453] env[63345]: INFO nova.compute.manager [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8] Took 7.36 seconds to spawn the instance on the hypervisor. [ 1093.152643] env[63345]: DEBUG nova.compute.manager [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1093.153528] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d23c50f-b5dd-45fc-9db5-25b1b729a8ed {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.164954] env[63345]: INFO nova.virt.block_device [None req-4f1133eb-2615-4f5b-9232-e78b5899b63e tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 95ef4f91-a618-4ae2-95ad-d027c031f239] Attempting to driver detach volume eb73c317-f521-4815-9091-7618b879a9fd from mountpoint /dev/sdb [ 1093.165273] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-4f1133eb-2615-4f5b-9232-e78b5899b63e tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 95ef4f91-a618-4ae2-95ad-d027c031f239] Volume detach. 
Driver type: vmdk {{(pid=63345) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1093.165471] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-4f1133eb-2615-4f5b-9232-e78b5899b63e tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 95ef4f91-a618-4ae2-95ad-d027c031f239] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-226170', 'volume_id': 'eb73c317-f521-4815-9091-7618b879a9fd', 'name': 'volume-eb73c317-f521-4815-9091-7618b879a9fd', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '95ef4f91-a618-4ae2-95ad-d027c031f239', 'attached_at': '', 'detached_at': '', 'volume_id': 'eb73c317-f521-4815-9091-7618b879a9fd', 'serial': 'eb73c317-f521-4815-9091-7618b879a9fd'} {{(pid=63345) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1093.166302] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a35306d9-abb5-4795-ad94-104af2c2fbb8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.188540] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c08cac0a-8883-4a4e-8b46-7bdcd39650f9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.196028] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1aeafad-c8e2-4840-882d-4f6c34fa3a9d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.200757] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 4a59b565-571f-48ef-97bd-bed9853e2d8e] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1093.220545] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e242c73-0415-4864-a2ec-3eb7f8d0893f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.236916] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-4f1133eb-2615-4f5b-9232-e78b5899b63e tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] The volume has not been displaced from its original location: [datastore2] volume-eb73c317-f521-4815-9091-7618b879a9fd/volume-eb73c317-f521-4815-9091-7618b879a9fd.vmdk. No consolidation needed. 
{{(pid=63345) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1093.242075] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-4f1133eb-2615-4f5b-9232-e78b5899b63e tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 95ef4f91-a618-4ae2-95ad-d027c031f239] Reconfiguring VM instance instance-0000006a to detach disk 2001 {{(pid=63345) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1093.242591] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c40243e1-eab9-4b5b-abb0-9e368d6b8c94 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.262647] env[63345]: DEBUG oslo_vmware.api [None req-4f1133eb-2615-4f5b-9232-e78b5899b63e tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Waiting for the task: (returnval){ [ 1093.262647] env[63345]: value = "task-1017872" [ 1093.262647] env[63345]: _type = "Task" [ 1093.262647] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.273461] env[63345]: DEBUG oslo_vmware.api [None req-4f1133eb-2615-4f5b-9232-e78b5899b63e tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017872, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.377973] env[63345]: DEBUG oslo_vmware.api [None req-44fe4ce0-3a77-4dd8-8b05-571bd5761283 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017871, 'name': PowerOffVM_Task, 'duration_secs': 0.414358} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.377973] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-44fe4ce0-3a77-4dd8-8b05-571bd5761283 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1093.377973] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-44fe4ce0-3a77-4dd8-8b05-571bd5761283 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Updating instance '7245e83c-2dda-4b2f-8a65-07f7e4d6828a' progress to 17 {{(pid=63345) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1093.391548] env[63345]: INFO nova.compute.manager [None req-f57ef451-7da6-43fb-b0da-0f1d9907f2f8 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Shelve offloading [ 1093.461573] env[63345]: DEBUG oslo_concurrency.lockutils [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.246s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1093.461998] env[63345]: DEBUG nova.compute.manager [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Start building networks asynchronously for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 1093.465783] env[63345]: DEBUG oslo_concurrency.lockutils [None req-161c761c-81c3-453c-a23d-4392f1ef777e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.506s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1093.466373] env[63345]: DEBUG nova.objects.instance [None req-161c761c-81c3-453c-a23d-4392f1ef777e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lazy-loading 'resources' on Instance uuid 869f8110-6490-4a47-955a-0ce085f826af {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1093.677678] env[63345]: INFO nova.compute.manager [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8] Took 23.56 seconds to build instance. 
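The Rename_Task, PowerOnVM_Task and PowerOffVM_Task sequences above all follow the same oslo.vmware pattern: the driver starts a vSphere task over the SOAP session, then blocks in wait_for_task() while _poll_task reports "progress is N%" until the task completes with a 'duration_secs' result. A minimal sketch of that pattern, outside Nova, is shown below; the vCenter host, credentials and the vm_ref argument are placeholders, and the positional constructor arguments are an assumption based on oslo.vmware's public API rather than anything taken from this deployment.

    from oslo_vmware import api as vmware_api

    # Placeholder connection details; in Nova these come from nova.conf [vmware].
    session = vmware_api.VMwareAPISession(
        'vcenter.example.org',           # vCenter host (placeholder)
        'administrator@vsphere.local',   # user (placeholder)
        'secret',                        # password (placeholder)
        10,                              # api_retry_count
        0.5)                             # task_poll_interval: cadence of the "progress is N%" polling

    def power_on(vm_ref):
        """Start PowerOnVM_Task for vm_ref and block until it finishes."""
        # invoke_api() issues the SOAP call, i.e. the
        # "Invoking VirtualMachine.PowerOnVM_Task" lines above.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # wait_for_task() polls the task object (the _poll_task lines) and
        # raises if the task fails, returning the task info on success.
        return session.wait_for_task(task)

The same invoke-then-wait shape applies to the ReconfigVM_Task and RemoveSnapshot_Task calls logged above; only the method name and arguments differ.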
[ 1093.703881] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 3b0d115d-dad5-4881-a0e0-b98f555da533] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1093.773561] env[63345]: DEBUG oslo_vmware.api [None req-4f1133eb-2615-4f5b-9232-e78b5899b63e tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017872, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.884548] env[63345]: DEBUG nova.virt.hardware [None req-44fe4ce0-3a77-4dd8-8b05-571bd5761283 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1093.884734] env[63345]: DEBUG nova.virt.hardware [None req-44fe4ce0-3a77-4dd8-8b05-571bd5761283 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1093.884892] env[63345]: DEBUG nova.virt.hardware [None req-44fe4ce0-3a77-4dd8-8b05-571bd5761283 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1093.885092] env[63345]: DEBUG nova.virt.hardware [None req-44fe4ce0-3a77-4dd8-8b05-571bd5761283 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1093.885252] env[63345]: DEBUG nova.virt.hardware [None req-44fe4ce0-3a77-4dd8-8b05-571bd5761283 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1093.885409] env[63345]: DEBUG nova.virt.hardware [None req-44fe4ce0-3a77-4dd8-8b05-571bd5761283 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1093.885622] env[63345]: DEBUG nova.virt.hardware [None req-44fe4ce0-3a77-4dd8-8b05-571bd5761283 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 1093.885820] env[63345]: DEBUG nova.virt.hardware [None req-44fe4ce0-3a77-4dd8-8b05-571bd5761283 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1093.886011] env[63345]: DEBUG nova.virt.hardware [None req-44fe4ce0-3a77-4dd8-8b05-571bd5761283 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1093.886190] env[63345]: DEBUG nova.virt.hardware [None req-44fe4ce0-3a77-4dd8-8b05-571bd5761283 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1093.886370] env[63345]: DEBUG nova.virt.hardware [None req-44fe4ce0-3a77-4dd8-8b05-571bd5761283 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1093.891618] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-941e4b18-2330-4c6c-8769-a32c97ad1fff {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.901983] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-f57ef451-7da6-43fb-b0da-0f1d9907f2f8 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1093.902248] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bf084f25-4022-4b05-a435-96ae023095c4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.910471] env[63345]: DEBUG oslo_vmware.api [None req-44fe4ce0-3a77-4dd8-8b05-571bd5761283 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for the task: (returnval){ [ 1093.910471] env[63345]: value = "task-1017874" [ 1093.910471] env[63345]: _type = "Task" [ 1093.910471] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.911803] env[63345]: DEBUG oslo_vmware.api [None req-f57ef451-7da6-43fb-b0da-0f1d9907f2f8 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Waiting for the task: (returnval){ [ 1093.911803] env[63345]: value = "task-1017873" [ 1093.911803] env[63345]: _type = "Task" [ 1093.911803] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.928349] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-f57ef451-7da6-43fb-b0da-0f1d9907f2f8 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] VM already powered off {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 1093.928561] env[63345]: DEBUG nova.compute.manager [None req-f57ef451-7da6-43fb-b0da-0f1d9907f2f8 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1093.928827] env[63345]: DEBUG oslo_vmware.api [None req-44fe4ce0-3a77-4dd8-8b05-571bd5761283 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017874, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.929664] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6518238e-52e6-4316-b32b-818286555658 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.936319] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f57ef451-7da6-43fb-b0da-0f1d9907f2f8 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Acquiring lock "refresh_cache-95738bee-d291-4f27-aeff-9445939bb3fa" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1093.936526] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f57ef451-7da6-43fb-b0da-0f1d9907f2f8 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Acquired lock "refresh_cache-95738bee-d291-4f27-aeff-9445939bb3fa" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1093.936734] env[63345]: DEBUG nova.network.neutron [None req-f57ef451-7da6-43fb-b0da-0f1d9907f2f8 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1093.974737] env[63345]: DEBUG nova.compute.utils [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1093.976239] env[63345]: INFO nova.compute.manager [None req-335f4f7b-16b5-4800-92af-6c82e791d2a8 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8] Rescuing [ 1093.976475] env[63345]: DEBUG oslo_concurrency.lockutils [None req-335f4f7b-16b5-4800-92af-6c82e791d2a8 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Acquiring lock "refresh_cache-83ef21e9-62eb-4f0d-9c0c-a038743e0dd8" {{(pid=63345) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1093.976673] env[63345]: DEBUG oslo_concurrency.lockutils [None req-335f4f7b-16b5-4800-92af-6c82e791d2a8 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Acquired lock "refresh_cache-83ef21e9-62eb-4f0d-9c0c-a038743e0dd8" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1093.976942] env[63345]: DEBUG nova.network.neutron [None req-335f4f7b-16b5-4800-92af-6c82e791d2a8 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1093.979194] env[63345]: DEBUG nova.compute.manager [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Allocating IP information in the background. {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1093.979194] env[63345]: DEBUG nova.network.neutron [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1094.037719] env[63345]: DEBUG nova.policy [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b3754c2317404a48a80cfee69f1044ee', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '57e386920081487583ea143003aca8c4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 1094.096508] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f347a57b-f5d3-4a16-9014-5d79cb5dc597 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.105976] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42b8eb95-f111-4b72-9a4c-a0ec42affff7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.138460] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc8fc37f-9041-4b37-b457-8f1fb4401683 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.147152] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7df6e8f-6c9f-4951-81cb-13411a8b5db5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.161605] env[63345]: DEBUG nova.compute.provider_tree [None req-161c761c-81c3-453c-a23d-4392f1ef777e 
tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1094.178571] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0cb5d552-07bb-46a4-9494-0c69b92e6827 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Lock "83ef21e9-62eb-4f0d-9c0c-a038743e0dd8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.069s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1094.207265] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 85fb1ecd-4ca3-401d-a87a-131f0b275506] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1094.273684] env[63345]: DEBUG oslo_vmware.api [None req-4f1133eb-2615-4f5b-9232-e78b5899b63e tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017872, 'name': ReconfigVM_Task, 'duration_secs': 0.565951} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.274161] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-4f1133eb-2615-4f5b-9232-e78b5899b63e tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 95ef4f91-a618-4ae2-95ad-d027c031f239] Reconfigured VM instance instance-0000006a to detach disk 2001 {{(pid=63345) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1094.285413] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-47ca28bc-045b-499d-8f7f-3392bf26cf7d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.320620] env[63345]: DEBUG oslo_vmware.api [None req-4f1133eb-2615-4f5b-9232-e78b5899b63e tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Waiting for the task: (returnval){ [ 1094.320620] env[63345]: value = "task-1017875" [ 1094.320620] env[63345]: _type = "Task" [ 1094.320620] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.342606] env[63345]: DEBUG oslo_vmware.api [None req-4f1133eb-2615-4f5b-9232-e78b5899b63e tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017875, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.412250] env[63345]: DEBUG nova.network.neutron [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Successfully created port: 104a12d2-9632-4d24-a0e3-d4b18e907a58 {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1094.426775] env[63345]: DEBUG oslo_vmware.api [None req-44fe4ce0-3a77-4dd8-8b05-571bd5761283 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017874, 'name': ReconfigVM_Task, 'duration_secs': 0.295562} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.427136] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-44fe4ce0-3a77-4dd8-8b05-571bd5761283 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Updating instance '7245e83c-2dda-4b2f-8a65-07f7e4d6828a' progress to 33 {{(pid=63345) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1094.479107] env[63345]: DEBUG nova.compute.manager [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Start building block device mappings for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 1094.664852] env[63345]: DEBUG nova.scheduler.client.report [None req-161c761c-81c3-453c-a23d-4392f1ef777e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1094.682679] env[63345]: DEBUG nova.network.neutron [None req-f57ef451-7da6-43fb-b0da-0f1d9907f2f8 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Updating instance_info_cache with network_info: [{"id": "f9b10cca-c2c3-45d2-a329-61efee5dde7f", "address": "fa:16:3e:31:f2:fb", "network": {"id": "95d95c9b-b21c-4ee5-ab54-d0bf2699d38e", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-88421441-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba08f64c26d245a8b8f2b52ea97c2f1a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": 
{"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7043ca7a-807c-4c7b-b646-23ffece188b2", "external-id": "nsx-vlan-transportzone-619", "segmentation_id": 619, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf9b10cca-c2", "ovs_interfaceid": "f9b10cca-c2c3-45d2-a329-61efee5dde7f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1094.710790] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: f043239f-7158-4199-a784-d711a5a301be] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1094.831190] env[63345]: DEBUG oslo_vmware.api [None req-4f1133eb-2615-4f5b-9232-e78b5899b63e tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017875, 'name': ReconfigVM_Task, 'duration_secs': 0.16402} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.831543] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-4f1133eb-2615-4f5b-9232-e78b5899b63e tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 95ef4f91-a618-4ae2-95ad-d027c031f239] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-226170', 'volume_id': 'eb73c317-f521-4815-9091-7618b879a9fd', 'name': 'volume-eb73c317-f521-4815-9091-7618b879a9fd', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '95ef4f91-a618-4ae2-95ad-d027c031f239', 'attached_at': '', 'detached_at': '', 'volume_id': 'eb73c317-f521-4815-9091-7618b879a9fd', 'serial': 'eb73c317-f521-4815-9091-7618b879a9fd'} {{(pid=63345) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1094.930833] env[63345]: DEBUG nova.network.neutron [None req-335f4f7b-16b5-4800-92af-6c82e791d2a8 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8] Updating instance_info_cache with network_info: [{"id": "24c75109-1060-4770-8c15-3bce8002f3e0", "address": "fa:16:3e:56:92:00", "network": {"id": "04c13a40-3e24-45e3-b045-adb1f5b0ad03", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1754460710-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a7aaf150ea243b6a38a4b14f265bd4d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3753f451-fa23-4988-9361-074fb0bd3fd4", "external-id": "nsx-vlan-transportzone-440", "segmentation_id": 440, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24c75109-10", "ovs_interfaceid": "24c75109-1060-4770-8c15-3bce8002f3e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1094.935195] env[63345]: DEBUG nova.virt.hardware [None req-44fe4ce0-3a77-4dd8-8b05-571bd5761283 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1094.935434] env[63345]: DEBUG nova.virt.hardware [None req-44fe4ce0-3a77-4dd8-8b05-571bd5761283 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1094.935599] env[63345]: DEBUG nova.virt.hardware [None req-44fe4ce0-3a77-4dd8-8b05-571bd5761283 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1094.935774] env[63345]: DEBUG nova.virt.hardware [None req-44fe4ce0-3a77-4dd8-8b05-571bd5761283 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1094.935923] env[63345]: DEBUG nova.virt.hardware [None req-44fe4ce0-3a77-4dd8-8b05-571bd5761283 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1094.936086] env[63345]: DEBUG nova.virt.hardware [None req-44fe4ce0-3a77-4dd8-8b05-571bd5761283 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1094.936296] env[63345]: DEBUG nova.virt.hardware [None req-44fe4ce0-3a77-4dd8-8b05-571bd5761283 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1094.936460] env[63345]: DEBUG nova.virt.hardware [None req-44fe4ce0-3a77-4dd8-8b05-571bd5761283 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1094.936639] env[63345]: DEBUG nova.virt.hardware [None req-44fe4ce0-3a77-4dd8-8b05-571bd5761283 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Got 
1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1094.936808] env[63345]: DEBUG nova.virt.hardware [None req-44fe4ce0-3a77-4dd8-8b05-571bd5761283 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1094.936990] env[63345]: DEBUG nova.virt.hardware [None req-44fe4ce0-3a77-4dd8-8b05-571bd5761283 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1094.947028] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-44fe4ce0-3a77-4dd8-8b05-571bd5761283 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Reconfiguring VM instance instance-0000006e to detach disk 2000 {{(pid=63345) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1094.947606] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f339ce51-9e8d-4e1a-aa3d-9384d80a7080 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.969936] env[63345]: DEBUG oslo_vmware.api [None req-44fe4ce0-3a77-4dd8-8b05-571bd5761283 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for the task: (returnval){ [ 1094.969936] env[63345]: value = "task-1017876" [ 1094.969936] env[63345]: _type = "Task" [ 1094.969936] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.978909] env[63345]: DEBUG oslo_vmware.api [None req-44fe4ce0-3a77-4dd8-8b05-571bd5761283 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017876, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.172167] env[63345]: DEBUG oslo_concurrency.lockutils [None req-161c761c-81c3-453c-a23d-4392f1ef777e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.706s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1095.186063] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f57ef451-7da6-43fb-b0da-0f1d9907f2f8 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Releasing lock "refresh_cache-95738bee-d291-4f27-aeff-9445939bb3fa" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1095.194955] env[63345]: INFO nova.scheduler.client.report [None req-161c761c-81c3-453c-a23d-4392f1ef777e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Deleted allocations for instance 869f8110-6490-4a47-955a-0ce085f826af [ 1095.214360] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 7bef089c-e93b-4ba6-a683-4e076489f92a] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1095.379524] env[63345]: DEBUG nova.objects.instance [None req-4f1133eb-2615-4f5b-9232-e78b5899b63e tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Lazy-loading 'flavor' on Instance uuid 95ef4f91-a618-4ae2-95ad-d027c031f239 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1095.424924] env[63345]: DEBUG nova.compute.manager [req-53e89cf3-6a7e-4ff6-8ac8-0090d6dd4503 req-8b95debf-c2e6-412c-abbe-f5fb481bdffe service nova] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Received event network-vif-unplugged-f9b10cca-c2c3-45d2-a329-61efee5dde7f {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1095.425189] env[63345]: DEBUG oslo_concurrency.lockutils [req-53e89cf3-6a7e-4ff6-8ac8-0090d6dd4503 req-8b95debf-c2e6-412c-abbe-f5fb481bdffe service nova] Acquiring lock "95738bee-d291-4f27-aeff-9445939bb3fa-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1095.425412] env[63345]: DEBUG oslo_concurrency.lockutils [req-53e89cf3-6a7e-4ff6-8ac8-0090d6dd4503 req-8b95debf-c2e6-412c-abbe-f5fb481bdffe service nova] Lock "95738bee-d291-4f27-aeff-9445939bb3fa-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1095.425724] env[63345]: DEBUG oslo_concurrency.lockutils [req-53e89cf3-6a7e-4ff6-8ac8-0090d6dd4503 req-8b95debf-c2e6-412c-abbe-f5fb481bdffe service nova] Lock "95738bee-d291-4f27-aeff-9445939bb3fa-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1095.425829] env[63345]: DEBUG nova.compute.manager [req-53e89cf3-6a7e-4ff6-8ac8-0090d6dd4503 req-8b95debf-c2e6-412c-abbe-f5fb481bdffe 
service nova] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] No waiting events found dispatching network-vif-unplugged-f9b10cca-c2c3-45d2-a329-61efee5dde7f {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1095.426061] env[63345]: WARNING nova.compute.manager [req-53e89cf3-6a7e-4ff6-8ac8-0090d6dd4503 req-8b95debf-c2e6-412c-abbe-f5fb481bdffe service nova] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Received unexpected event network-vif-unplugged-f9b10cca-c2c3-45d2-a329-61efee5dde7f for instance with vm_state shelved and task_state shelving_offloading. [ 1095.448891] env[63345]: DEBUG oslo_concurrency.lockutils [None req-335f4f7b-16b5-4800-92af-6c82e791d2a8 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Releasing lock "refresh_cache-83ef21e9-62eb-4f0d-9c0c-a038743e0dd8" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1095.480793] env[63345]: DEBUG oslo_vmware.api [None req-44fe4ce0-3a77-4dd8-8b05-571bd5761283 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017876, 'name': ReconfigVM_Task, 'duration_secs': 0.166318} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1095.481324] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-44fe4ce0-3a77-4dd8-8b05-571bd5761283 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Reconfigured VM instance instance-0000006e to detach disk 2000 {{(pid=63345) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1095.482291] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-645e2cae-f704-42e7-95c7-36042dab4610 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.499069] env[63345]: DEBUG nova.compute.manager [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Start spawning the instance on the hypervisor. 
{{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 1095.509176] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-44fe4ce0-3a77-4dd8-8b05-571bd5761283 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Reconfiguring VM instance instance-0000006e to attach disk [datastore2] 7245e83c-2dda-4b2f-8a65-07f7e4d6828a/7245e83c-2dda-4b2f-8a65-07f7e4d6828a.vmdk or device None with type thin {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1095.509874] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-444dcdfb-e321-48c1-bc1e-3f6f9b464cd4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.529084] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-f57ef451-7da6-43fb-b0da-0f1d9907f2f8 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1095.529861] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8d5717e-0323-4011-a0bf-0bdda20bbf3b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.534323] env[63345]: DEBUG nova.virt.hardware [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1095.534565] env[63345]: DEBUG nova.virt.hardware [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1095.534730] env[63345]: DEBUG nova.virt.hardware [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1095.534920] env[63345]: DEBUG nova.virt.hardware [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1095.535084] env[63345]: DEBUG 
nova.virt.hardware [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1095.535242] env[63345]: DEBUG nova.virt.hardware [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1095.535452] env[63345]: DEBUG nova.virt.hardware [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1095.535619] env[63345]: DEBUG nova.virt.hardware [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1095.535812] env[63345]: DEBUG nova.virt.hardware [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1095.535998] env[63345]: DEBUG nova.virt.hardware [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1095.536194] env[63345]: DEBUG nova.virt.hardware [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1095.537948] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1355546f-42e0-4809-9ea4-5606ab0b575c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.540401] env[63345]: DEBUG oslo_vmware.api [None req-44fe4ce0-3a77-4dd8-8b05-571bd5761283 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for the task: (returnval){ [ 1095.540401] env[63345]: value = "task-1017877" [ 1095.540401] env[63345]: _type = "Task" [ 1095.540401] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.548417] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-f57ef451-7da6-43fb-b0da-0f1d9907f2f8 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1095.549953] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64afb06f-2997-408a-ac0b-257163e30380 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.555887] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6eda4f4d-b4f2-4074-b661-7a944cc20c48 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.560178] env[63345]: DEBUG oslo_vmware.api [None req-44fe4ce0-3a77-4dd8-8b05-571bd5761283 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017877, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.642178] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-f57ef451-7da6-43fb-b0da-0f1d9907f2f8 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1095.642558] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-f57ef451-7da6-43fb-b0da-0f1d9907f2f8 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1095.642782] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-f57ef451-7da6-43fb-b0da-0f1d9907f2f8 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Deleting the datastore file [datastore2] 95738bee-d291-4f27-aeff-9445939bb3fa {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1095.643091] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-907133da-5b7a-410c-bd62-445dedfbb567 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.650524] env[63345]: DEBUG oslo_vmware.api [None req-f57ef451-7da6-43fb-b0da-0f1d9907f2f8 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Waiting for the task: (returnval){ [ 1095.650524] env[63345]: value = "task-1017879" [ 1095.650524] env[63345]: _type = "Task" [ 1095.650524] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.659517] env[63345]: DEBUG oslo_vmware.api [None req-f57ef451-7da6-43fb-b0da-0f1d9907f2f8 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017879, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.704330] env[63345]: DEBUG oslo_concurrency.lockutils [None req-161c761c-81c3-453c-a23d-4392f1ef777e tempest-ServerActionsTestOtherA-316720793 tempest-ServerActionsTestOtherA-316720793-project-member] Lock "869f8110-6490-4a47-955a-0ce085f826af" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.159s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1095.717992] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 3e4e58bd-903b-4b3d-8be4-5678aab6c721] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1096.051483] env[63345]: DEBUG oslo_vmware.api [None req-44fe4ce0-3a77-4dd8-8b05-571bd5761283 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017877, 'name': ReconfigVM_Task, 'duration_secs': 0.361523} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.051782] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-44fe4ce0-3a77-4dd8-8b05-571bd5761283 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Reconfigured VM instance instance-0000006e to attach disk [datastore2] 7245e83c-2dda-4b2f-8a65-07f7e4d6828a/7245e83c-2dda-4b2f-8a65-07f7e4d6828a.vmdk or device None with type thin {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1096.052087] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-44fe4ce0-3a77-4dd8-8b05-571bd5761283 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Updating instance '7245e83c-2dda-4b2f-8a65-07f7e4d6828a' progress to 50 {{(pid=63345) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1096.162703] env[63345]: DEBUG oslo_vmware.api [None req-f57ef451-7da6-43fb-b0da-0f1d9907f2f8 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017879, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.162807} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.163381] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-f57ef451-7da6-43fb-b0da-0f1d9907f2f8 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1096.164327] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-f57ef451-7da6-43fb-b0da-0f1d9907f2f8 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1096.164327] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-f57ef451-7da6-43fb-b0da-0f1d9907f2f8 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1096.189035] env[63345]: INFO nova.scheduler.client.report [None req-f57ef451-7da6-43fb-b0da-0f1d9907f2f8 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Deleted allocations for instance 95738bee-d291-4f27-aeff-9445939bb3fa [ 1096.221862] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 805f9143-a8d8-4995-a20d-3b10ef3ab599] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1096.386695] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4f1133eb-2615-4f5b-9232-e78b5899b63e tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Lock "95ef4f91-a618-4ae2-95ad-d027c031f239" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.766s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1096.405286] env[63345]: DEBUG nova.network.neutron [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Successfully updated port: 104a12d2-9632-4d24-a0e3-d4b18e907a58 {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1096.486640] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-335f4f7b-16b5-4800-92af-6c82e791d2a8 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1096.486979] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1d764f59-2c45-4849-acf0-4aa6b44d1f42 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.495890] env[63345]: DEBUG oslo_vmware.api [None req-335f4f7b-16b5-4800-92af-6c82e791d2a8 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Waiting for the task: (returnval){ [ 1096.495890] env[63345]: value = "task-1017880" [ 1096.495890] env[63345]: 
_type = "Task" [ 1096.495890] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.505974] env[63345]: DEBUG oslo_vmware.api [None req-335f4f7b-16b5-4800-92af-6c82e791d2a8 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017880, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.558631] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccbbc08d-7324-4724-ae19-ad1d8a2b315d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.581753] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97f9fa4b-2b1b-4ef0-a8bd-ed877d0c9e5d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.601171] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-44fe4ce0-3a77-4dd8-8b05-571bd5761283 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Updating instance '7245e83c-2dda-4b2f-8a65-07f7e4d6828a' progress to 67 {{(pid=63345) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1096.693722] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f57ef451-7da6-43fb-b0da-0f1d9907f2f8 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1096.694042] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f57ef451-7da6-43fb-b0da-0f1d9907f2f8 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1096.694306] env[63345]: DEBUG nova.objects.instance [None req-f57ef451-7da6-43fb-b0da-0f1d9907f2f8 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Lazy-loading 'resources' on Instance uuid 95738bee-d291-4f27-aeff-9445939bb3fa {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1096.725529] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 070a834d-6478-4705-8df0-2a27c8780507] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1096.905892] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fa44871e-27bc-47bd-b816-5d1226c6a854 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Acquiring lock "95ef4f91-a618-4ae2-95ad-d027c031f239" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1096.906186] env[63345]: DEBUG oslo_concurrency.lockutils [None 
req-fa44871e-27bc-47bd-b816-5d1226c6a854 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Lock "95ef4f91-a618-4ae2-95ad-d027c031f239" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1096.906405] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fa44871e-27bc-47bd-b816-5d1226c6a854 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Acquiring lock "95ef4f91-a618-4ae2-95ad-d027c031f239-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1096.906604] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fa44871e-27bc-47bd-b816-5d1226c6a854 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Lock "95ef4f91-a618-4ae2-95ad-d027c031f239-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1096.906780] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fa44871e-27bc-47bd-b816-5d1226c6a854 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Lock "95ef4f91-a618-4ae2-95ad-d027c031f239-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1096.909085] env[63345]: DEBUG oslo_concurrency.lockutils [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Acquiring lock "refresh_cache-5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1096.909224] env[63345]: DEBUG oslo_concurrency.lockutils [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Acquired lock "refresh_cache-5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1096.909376] env[63345]: DEBUG nova.network.neutron [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1096.910558] env[63345]: INFO nova.compute.manager [None req-fa44871e-27bc-47bd-b816-5d1226c6a854 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 95ef4f91-a618-4ae2-95ad-d027c031f239] Terminating instance [ 1097.011506] env[63345]: DEBUG oslo_vmware.api [None req-335f4f7b-16b5-4800-92af-6c82e791d2a8 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017880, 'name': PowerOffVM_Task, 'duration_secs': 0.319061} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.011894] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-335f4f7b-16b5-4800-92af-6c82e791d2a8 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1097.012856] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d8f73a2-cb8c-4f89-bb3d-060d52645460 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.032636] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ede6b5a8-f972-4942-80d5-49dc4d6f9061 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.082810] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-335f4f7b-16b5-4800-92af-6c82e791d2a8 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1097.083224] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9e8f1ce6-010f-4128-9797-92a763c9d106 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.091771] env[63345]: DEBUG oslo_vmware.api [None req-335f4f7b-16b5-4800-92af-6c82e791d2a8 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Waiting for the task: (returnval){ [ 1097.091771] env[63345]: value = "task-1017881" [ 1097.091771] env[63345]: _type = "Task" [ 1097.091771] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.100344] env[63345]: DEBUG oslo_vmware.api [None req-335f4f7b-16b5-4800-92af-6c82e791d2a8 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017881, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.148217] env[63345]: DEBUG nova.network.neutron [None req-44fe4ce0-3a77-4dd8-8b05-571bd5761283 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Port 5709e20d-8dfc-41ae-981d-01de437144e3 binding to destination host cpu-1 is already ACTIVE {{(pid=63345) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3171}} [ 1097.197247] env[63345]: DEBUG nova.objects.instance [None req-f57ef451-7da6-43fb-b0da-0f1d9907f2f8 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Lazy-loading 'numa_topology' on Instance uuid 95738bee-d291-4f27-aeff-9445939bb3fa {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1097.228194] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: bc9d2e6a-f77a-4a21-90bc-81949cbfce91] Instance has had 0 of 5 cleanup attempts {{(pid=63345) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1097.415694] env[63345]: DEBUG nova.compute.manager [None req-fa44871e-27bc-47bd-b816-5d1226c6a854 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 95ef4f91-a618-4ae2-95ad-d027c031f239] Start destroying the instance on the hypervisor. {{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 1097.416023] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-fa44871e-27bc-47bd-b816-5d1226c6a854 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 95ef4f91-a618-4ae2-95ad-d027c031f239] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1097.416906] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bb32b83-7038-426e-b878-e74d19b02bfb {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.427433] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa44871e-27bc-47bd-b816-5d1226c6a854 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 95ef4f91-a618-4ae2-95ad-d027c031f239] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1097.427953] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3a919153-75b4-423d-8685-ba37f6acc268 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.438427] env[63345]: DEBUG oslo_vmware.api [None req-fa44871e-27bc-47bd-b816-5d1226c6a854 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Waiting for the task: (returnval){ [ 1097.438427] env[63345]: value = "task-1017882" [ 1097.438427] env[63345]: _type = "Task" [ 1097.438427] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.456880] env[63345]: DEBUG oslo_vmware.api [None req-fa44871e-27bc-47bd-b816-5d1226c6a854 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017882, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.459709] env[63345]: DEBUG nova.network.neutron [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1097.604614] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-335f4f7b-16b5-4800-92af-6c82e791d2a8 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8] VM already powered off {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 1097.604942] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-335f4f7b-16b5-4800-92af-6c82e791d2a8 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1097.605385] env[63345]: DEBUG oslo_concurrency.lockutils [None req-335f4f7b-16b5-4800-92af-6c82e791d2a8 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1097.605637] env[63345]: DEBUG oslo_concurrency.lockutils [None req-335f4f7b-16b5-4800-92af-6c82e791d2a8 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1097.606085] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-335f4f7b-16b5-4800-92af-6c82e791d2a8 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1097.606404] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6b2de053-1e24-4e44-a3f3-cb3eb5424500 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.617934] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-335f4f7b-16b5-4800-92af-6c82e791d2a8 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1097.618244] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-335f4f7b-16b5-4800-92af-6c82e791d2a8 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1097.621584] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b513e55a-83bb-4855-a7f1-60a0fd75e46b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.634569] env[63345]: DEBUG oslo_vmware.api [None req-335f4f7b-16b5-4800-92af-6c82e791d2a8 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Waiting for the task: (returnval){ [ 1097.634569] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52cbcc95-ac1d-6a08-4ded-e8c6f5e68051" [ 1097.634569] env[63345]: _type = "Task" [ 1097.634569] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.649808] env[63345]: DEBUG oslo_vmware.api [None req-335f4f7b-16b5-4800-92af-6c82e791d2a8 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52cbcc95-ac1d-6a08-4ded-e8c6f5e68051, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.681811] env[63345]: DEBUG nova.network.neutron [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Updating instance_info_cache with network_info: [{"id": "104a12d2-9632-4d24-a0e3-d4b18e907a58", "address": "fa:16:3e:a9:14:31", "network": {"id": "dffa0b34-9323-42eb-aeb1-e32aebcb75c8", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1826417035-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "57e386920081487583ea143003aca8c4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "94e1d797-8eb2-4400-9f7d-f2eb60eb4cf2", "external-id": "nsx-vlan-transportzone-828", "segmentation_id": 828, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap104a12d2-96", "ovs_interfaceid": "104a12d2-9632-4d24-a0e3-d4b18e907a58", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1097.702811] env[63345]: DEBUG nova.objects.base [None req-f57ef451-7da6-43fb-b0da-0f1d9907f2f8 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Object Instance<95738bee-d291-4f27-aeff-9445939bb3fa> lazy-loaded attributes: resources,numa_topology {{(pid=63345) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 1097.710886] env[63345]: DEBUG nova.compute.manager [req-37cf4997-a237-48f3-8287-e93d279d0eda req-0fff16ca-1b0a-4406-ad06-0ce04ce88415 service nova] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Received event 
network-changed-f9b10cca-c2c3-45d2-a329-61efee5dde7f {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1097.711499] env[63345]: DEBUG nova.compute.manager [req-37cf4997-a237-48f3-8287-e93d279d0eda req-0fff16ca-1b0a-4406-ad06-0ce04ce88415 service nova] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Refreshing instance network info cache due to event network-changed-f9b10cca-c2c3-45d2-a329-61efee5dde7f. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 1097.711499] env[63345]: DEBUG oslo_concurrency.lockutils [req-37cf4997-a237-48f3-8287-e93d279d0eda req-0fff16ca-1b0a-4406-ad06-0ce04ce88415 service nova] Acquiring lock "refresh_cache-95738bee-d291-4f27-aeff-9445939bb3fa" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1097.712207] env[63345]: DEBUG oslo_concurrency.lockutils [req-37cf4997-a237-48f3-8287-e93d279d0eda req-0fff16ca-1b0a-4406-ad06-0ce04ce88415 service nova] Acquired lock "refresh_cache-95738bee-d291-4f27-aeff-9445939bb3fa" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1097.714016] env[63345]: DEBUG nova.network.neutron [req-37cf4997-a237-48f3-8287-e93d279d0eda req-0fff16ca-1b0a-4406-ad06-0ce04ce88415 service nova] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Refreshing network info cache for port f9b10cca-c2c3-45d2-a329-61efee5dde7f {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1097.730934] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1097.731226] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Cleaning up deleted instances with incomplete migration {{(pid=63345) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11595}} [ 1097.857414] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de3ee893-5071-45df-9750-3755714dfc9d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.865731] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd0239ef-102a-440d-b4cb-00fc75f041d9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.895988] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5981780f-4e22-4589-80fe-c32602e229c5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.903937] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ad43763-68be-416d-bacf-fb17a7a2ce4e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.918915] env[63345]: DEBUG nova.compute.provider_tree [None req-f57ef451-7da6-43fb-b0da-0f1d9907f2f8 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1097.950849] env[63345]: DEBUG oslo_vmware.api [None req-fa44871e-27bc-47bd-b816-5d1226c6a854 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017882, 'name': PowerOffVM_Task, 'duration_secs': 0.38887} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.950849] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa44871e-27bc-47bd-b816-5d1226c6a854 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 95ef4f91-a618-4ae2-95ad-d027c031f239] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1097.950849] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-fa44871e-27bc-47bd-b816-5d1226c6a854 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 95ef4f91-a618-4ae2-95ad-d027c031f239] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1097.951114] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9a48c70a-976b-49f4-bf37-9f87ea7a9417 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.036264] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-fa44871e-27bc-47bd-b816-5d1226c6a854 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 95ef4f91-a618-4ae2-95ad-d027c031f239] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1098.036446] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-fa44871e-27bc-47bd-b816-5d1226c6a854 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 95ef4f91-a618-4ae2-95ad-d027c031f239] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1098.036881] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa44871e-27bc-47bd-b816-5d1226c6a854 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Deleting the datastore file [datastore2] 95ef4f91-a618-4ae2-95ad-d027c031f239 {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1098.036881] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-64a6ff92-0bf2-4697-a06c-be219e771370 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.043526] env[63345]: DEBUG oslo_vmware.api [None req-fa44871e-27bc-47bd-b816-5d1226c6a854 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Waiting for the task: (returnval){ [ 1098.043526] env[63345]: value = "task-1017884" [ 1098.043526] env[63345]: _type = "Task" [ 1098.043526] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.051727] env[63345]: DEBUG oslo_vmware.api [None req-fa44871e-27bc-47bd-b816-5d1226c6a854 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017884, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.146029] env[63345]: DEBUG oslo_vmware.api [None req-335f4f7b-16b5-4800-92af-6c82e791d2a8 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52cbcc95-ac1d-6a08-4ded-e8c6f5e68051, 'name': SearchDatastore_Task, 'duration_secs': 0.019388} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.146342] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-30326932-3d91-4833-943b-9f58925d5f36 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.152480] env[63345]: DEBUG oslo_vmware.api [None req-335f4f7b-16b5-4800-92af-6c82e791d2a8 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Waiting for the task: (returnval){ [ 1098.152480] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52f48bee-ce94-c46c-e668-65e4d1fbe22c" [ 1098.152480] env[63345]: _type = "Task" [ 1098.152480] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.188765] env[63345]: DEBUG oslo_vmware.api [None req-335f4f7b-16b5-4800-92af-6c82e791d2a8 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52f48bee-ce94-c46c-e668-65e4d1fbe22c, 'name': SearchDatastore_Task, 'duration_secs': 0.010146} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.189778] env[63345]: DEBUG oslo_concurrency.lockutils [None req-44fe4ce0-3a77-4dd8-8b05-571bd5761283 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquiring lock "7245e83c-2dda-4b2f-8a65-07f7e4d6828a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1098.189778] env[63345]: DEBUG oslo_concurrency.lockutils [None req-44fe4ce0-3a77-4dd8-8b05-571bd5761283 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lock "7245e83c-2dda-4b2f-8a65-07f7e4d6828a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1098.189778] env[63345]: DEBUG oslo_concurrency.lockutils [None req-44fe4ce0-3a77-4dd8-8b05-571bd5761283 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lock "7245e83c-2dda-4b2f-8a65-07f7e4d6828a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1098.191234] env[63345]: DEBUG oslo_concurrency.lockutils [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Releasing lock "refresh_cache-5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1098.191399] env[63345]: DEBUG nova.compute.manager [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Instance network_info: |[{"id": "104a12d2-9632-4d24-a0e3-d4b18e907a58", "address": "fa:16:3e:a9:14:31", "network": {"id": "dffa0b34-9323-42eb-aeb1-e32aebcb75c8", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1826417035-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "57e386920081487583ea143003aca8c4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "94e1d797-8eb2-4400-9f7d-f2eb60eb4cf2", "external-id": "nsx-vlan-transportzone-828", "segmentation_id": 828, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap104a12d2-96", "ovs_interfaceid": "104a12d2-9632-4d24-a0e3-d4b18e907a58", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 1098.191944] env[63345]: DEBUG oslo_concurrency.lockutils [None req-335f4f7b-16b5-4800-92af-6c82e791d2a8 
tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1098.192303] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-335f4f7b-16b5-4800-92af-6c82e791d2a8 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8/2ff49e1b-8f44-4332-bba9-777d55ff62c4-rescue.vmdk. {{(pid=63345) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1098.193756] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a9:14:31', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '94e1d797-8eb2-4400-9f7d-f2eb60eb4cf2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '104a12d2-9632-4d24-a0e3-d4b18e907a58', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1098.208881] env[63345]: DEBUG oslo.service.loopingcall [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1098.209015] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a5a34631-1f5b-4ed6-96cb-b693fbb100c4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.213390] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1098.213976] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a5772de8-7ba2-4aff-892f-08cb32338619 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.236468] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1098.241027] env[63345]: DEBUG oslo_vmware.api [None req-335f4f7b-16b5-4800-92af-6c82e791d2a8 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Waiting for the task: (returnval){ [ 1098.241027] env[63345]: value = "task-1017885" [ 1098.241027] env[63345]: _type = "Task" [ 1098.241027] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.243255] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1098.243255] env[63345]: value = "task-1017886" [ 1098.243255] env[63345]: _type = "Task" [ 1098.243255] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.250434] env[63345]: DEBUG oslo_vmware.api [None req-335f4f7b-16b5-4800-92af-6c82e791d2a8 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017885, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.256073] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017886, 'name': CreateVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.422403] env[63345]: DEBUG nova.scheduler.client.report [None req-f57ef451-7da6-43fb-b0da-0f1d9907f2f8 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1098.555204] env[63345]: DEBUG oslo_vmware.api [None req-fa44871e-27bc-47bd-b816-5d1226c6a854 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017884, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.186787} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.555562] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa44871e-27bc-47bd-b816-5d1226c6a854 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1098.555831] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-fa44871e-27bc-47bd-b816-5d1226c6a854 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 95ef4f91-a618-4ae2-95ad-d027c031f239] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1098.556110] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-fa44871e-27bc-47bd-b816-5d1226c6a854 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 95ef4f91-a618-4ae2-95ad-d027c031f239] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1098.556578] env[63345]: INFO nova.compute.manager [None req-fa44871e-27bc-47bd-b816-5d1226c6a854 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 95ef4f91-a618-4ae2-95ad-d027c031f239] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1098.556790] env[63345]: DEBUG oslo.service.loopingcall [None req-fa44871e-27bc-47bd-b816-5d1226c6a854 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1098.556894] env[63345]: DEBUG nova.compute.manager [-] [instance: 95ef4f91-a618-4ae2-95ad-d027c031f239] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 1098.557145] env[63345]: DEBUG nova.network.neutron [-] [instance: 95ef4f91-a618-4ae2-95ad-d027c031f239] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1098.578514] env[63345]: DEBUG nova.network.neutron [req-37cf4997-a237-48f3-8287-e93d279d0eda req-0fff16ca-1b0a-4406-ad06-0ce04ce88415 service nova] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Updated VIF entry in instance network info cache for port f9b10cca-c2c3-45d2-a329-61efee5dde7f. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1098.578959] env[63345]: DEBUG nova.network.neutron [req-37cf4997-a237-48f3-8287-e93d279d0eda req-0fff16ca-1b0a-4406-ad06-0ce04ce88415 service nova] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Updating instance_info_cache with network_info: [{"id": "f9b10cca-c2c3-45d2-a329-61efee5dde7f", "address": "fa:16:3e:31:f2:fb", "network": {"id": "95d95c9b-b21c-4ee5-ab54-d0bf2699d38e", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-88421441-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba08f64c26d245a8b8f2b52ea97c2f1a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapf9b10cca-c2", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1098.756909] env[63345]: DEBUG oslo_vmware.api [None req-335f4f7b-16b5-4800-92af-6c82e791d2a8 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017885, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.761251] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017886, 'name': CreateVM_Task, 'duration_secs': 0.40456} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.762094] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1098.763035] env[63345]: DEBUG oslo_concurrency.lockutils [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1098.763342] env[63345]: DEBUG oslo_concurrency.lockutils [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1098.763786] env[63345]: DEBUG oslo_concurrency.lockutils [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1098.764159] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-66d6329e-4e6f-4639-98c8-bbd4dd1fc9e8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.771081] env[63345]: DEBUG oslo_vmware.api [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 1098.771081] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52926fd5-9e3e-2fd3-ce7b-314c58482a19" [ 1098.771081] env[63345]: _type = "Task" [ 1098.771081] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.780812] env[63345]: DEBUG oslo_vmware.api [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52926fd5-9e3e-2fd3-ce7b-314c58482a19, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.929688] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f57ef451-7da6-43fb-b0da-0f1d9907f2f8 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.236s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1099.085156] env[63345]: DEBUG oslo_concurrency.lockutils [req-37cf4997-a237-48f3-8287-e93d279d0eda req-0fff16ca-1b0a-4406-ad06-0ce04ce88415 service nova] Releasing lock "refresh_cache-95738bee-d291-4f27-aeff-9445939bb3fa" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1099.085156] env[63345]: DEBUG nova.compute.manager [req-37cf4997-a237-48f3-8287-e93d279d0eda req-0fff16ca-1b0a-4406-ad06-0ce04ce88415 service nova] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Received event network-vif-plugged-104a12d2-9632-4d24-a0e3-d4b18e907a58 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1099.085156] env[63345]: DEBUG oslo_concurrency.lockutils [req-37cf4997-a237-48f3-8287-e93d279d0eda req-0fff16ca-1b0a-4406-ad06-0ce04ce88415 service nova] Acquiring lock "5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1099.085156] env[63345]: DEBUG oslo_concurrency.lockutils [req-37cf4997-a237-48f3-8287-e93d279d0eda req-0fff16ca-1b0a-4406-ad06-0ce04ce88415 service nova] Lock "5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1099.085156] env[63345]: DEBUG oslo_concurrency.lockutils [req-37cf4997-a237-48f3-8287-e93d279d0eda req-0fff16ca-1b0a-4406-ad06-0ce04ce88415 service nova] Lock "5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1099.085156] env[63345]: DEBUG nova.compute.manager [req-37cf4997-a237-48f3-8287-e93d279d0eda req-0fff16ca-1b0a-4406-ad06-0ce04ce88415 service nova] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] No waiting events found dispatching network-vif-plugged-104a12d2-9632-4d24-a0e3-d4b18e907a58 {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1099.085156] env[63345]: WARNING nova.compute.manager [req-37cf4997-a237-48f3-8287-e93d279d0eda req-0fff16ca-1b0a-4406-ad06-0ce04ce88415 service nova] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Received unexpected event network-vif-plugged-104a12d2-9632-4d24-a0e3-d4b18e907a58 for instance with vm_state building and task_state spawning. 
[ 1099.085156] env[63345]: DEBUG nova.compute.manager [req-37cf4997-a237-48f3-8287-e93d279d0eda req-0fff16ca-1b0a-4406-ad06-0ce04ce88415 service nova] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Received event network-changed-104a12d2-9632-4d24-a0e3-d4b18e907a58 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1099.085156] env[63345]: DEBUG nova.compute.manager [req-37cf4997-a237-48f3-8287-e93d279d0eda req-0fff16ca-1b0a-4406-ad06-0ce04ce88415 service nova] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Refreshing instance network info cache due to event network-changed-104a12d2-9632-4d24-a0e3-d4b18e907a58. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 1099.085156] env[63345]: DEBUG oslo_concurrency.lockutils [req-37cf4997-a237-48f3-8287-e93d279d0eda req-0fff16ca-1b0a-4406-ad06-0ce04ce88415 service nova] Acquiring lock "refresh_cache-5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1099.085156] env[63345]: DEBUG oslo_concurrency.lockutils [req-37cf4997-a237-48f3-8287-e93d279d0eda req-0fff16ca-1b0a-4406-ad06-0ce04ce88415 service nova] Acquired lock "refresh_cache-5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1099.085156] env[63345]: DEBUG nova.network.neutron [req-37cf4997-a237-48f3-8287-e93d279d0eda req-0fff16ca-1b0a-4406-ad06-0ce04ce88415 service nova] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Refreshing network info cache for port 104a12d2-9632-4d24-a0e3-d4b18e907a58 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1099.222606] env[63345]: DEBUG oslo_concurrency.lockutils [None req-44fe4ce0-3a77-4dd8-8b05-571bd5761283 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquiring lock "refresh_cache-7245e83c-2dda-4b2f-8a65-07f7e4d6828a" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1099.222816] env[63345]: DEBUG oslo_concurrency.lockutils [None req-44fe4ce0-3a77-4dd8-8b05-571bd5761283 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquired lock "refresh_cache-7245e83c-2dda-4b2f-8a65-07f7e4d6828a" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1099.223010] env[63345]: DEBUG nova.network.neutron [None req-44fe4ce0-3a77-4dd8-8b05-571bd5761283 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1099.240591] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Acquiring lock "95738bee-d291-4f27-aeff-9445939bb3fa" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1099.251990] env[63345]: DEBUG oslo_vmware.api [None req-335f4f7b-16b5-4800-92af-6c82e791d2a8 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: 
{'id': task-1017885, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.552532} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.256112] env[63345]: INFO nova.virt.vmwareapi.ds_util [None req-335f4f7b-16b5-4800-92af-6c82e791d2a8 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8/2ff49e1b-8f44-4332-bba9-777d55ff62c4-rescue.vmdk. [ 1099.256962] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19153c26-09b4-4efd-bf46-ad7269d82365 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.295253] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-335f4f7b-16b5-4800-92af-6c82e791d2a8 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8] Reconfiguring VM instance instance-00000070 to attach disk [datastore2] 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8/2ff49e1b-8f44-4332-bba9-777d55ff62c4-rescue.vmdk or device None with type thin {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1099.298987] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bb05f1ce-1818-4514-9d5c-3d5de38f0868 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.320544] env[63345]: DEBUG oslo_vmware.api [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52926fd5-9e3e-2fd3-ce7b-314c58482a19, 'name': SearchDatastore_Task, 'duration_secs': 0.033807} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.323129] env[63345]: DEBUG oslo_concurrency.lockutils [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1099.323129] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1099.323129] env[63345]: DEBUG oslo_concurrency.lockutils [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1099.323129] env[63345]: DEBUG oslo_concurrency.lockutils [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1099.323814] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1099.323912] env[63345]: DEBUG oslo_vmware.api [None req-335f4f7b-16b5-4800-92af-6c82e791d2a8 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Waiting for the task: (returnval){ [ 1099.323912] env[63345]: value = "task-1017887" [ 1099.323912] env[63345]: _type = "Task" [ 1099.323912] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.324775] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-63caa2dc-fe8b-4240-8061-2efbb8881542 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.338456] env[63345]: DEBUG oslo_vmware.api [None req-335f4f7b-16b5-4800-92af-6c82e791d2a8 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017887, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.339369] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1099.340024] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1099.340291] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1b326397-cced-41fc-ad92-2b124f348d72 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.346309] env[63345]: DEBUG oslo_vmware.api [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 1099.346309] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52b1e12d-476d-6281-37a3-d12d1b710ced" [ 1099.346309] env[63345]: _type = "Task" [ 1099.346309] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.356390] env[63345]: DEBUG oslo_vmware.api [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52b1e12d-476d-6281-37a3-d12d1b710ced, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.446877] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f57ef451-7da6-43fb-b0da-0f1d9907f2f8 tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Lock "95738bee-d291-4f27-aeff-9445939bb3fa" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 22.089s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1099.446877] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Lock "95738bee-d291-4f27-aeff-9445939bb3fa" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 0.205s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1099.446877] env[63345]: INFO nova.compute.manager [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Unshelving [ 1099.598515] env[63345]: DEBUG nova.network.neutron [-] [instance: 95ef4f91-a618-4ae2-95ad-d027c031f239] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1099.737442] env[63345]: DEBUG nova.compute.manager [req-e9904199-c320-45ae-ae7e-ce7098619b51 req-36684fd3-3d88-4066-95a6-22a25a46593f service nova] [instance: 95ef4f91-a618-4ae2-95ad-d027c031f239] Received event network-vif-deleted-e65d10a2-1bac-4be4-846a-6fc94207c2b7 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1099.837879] env[63345]: DEBUG oslo_vmware.api [None req-335f4f7b-16b5-4800-92af-6c82e791d2a8 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017887, 'name': ReconfigVM_Task, 'duration_secs': 0.301773} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.840504] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-335f4f7b-16b5-4800-92af-6c82e791d2a8 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8] Reconfigured VM instance instance-00000070 to attach disk [datastore2] 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8/2ff49e1b-8f44-4332-bba9-777d55ff62c4-rescue.vmdk or device None with type thin {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1099.841477] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bed5b33f-21e5-4405-a080-b8c1d8d6e551 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.878338] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aa4d9ac2-f79d-4eeb-b322-43b6741935fc {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.893413] env[63345]: DEBUG oslo_vmware.api [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52b1e12d-476d-6281-37a3-d12d1b710ced, 'name': SearchDatastore_Task, 'duration_secs': 0.011394} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.894615] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-79ef3d8e-1e38-48d5-9eeb-4f4a3e36c291 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.900591] env[63345]: DEBUG oslo_vmware.api [None req-335f4f7b-16b5-4800-92af-6c82e791d2a8 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Waiting for the task: (returnval){ [ 1099.900591] env[63345]: value = "task-1017888" [ 1099.900591] env[63345]: _type = "Task" [ 1099.900591] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.902038] env[63345]: DEBUG oslo_vmware.api [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 1099.902038] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52f38bcb-03f6-ef6f-dbc4-57c43d1d9e1e" [ 1099.902038] env[63345]: _type = "Task" [ 1099.902038] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.915300] env[63345]: DEBUG oslo_vmware.api [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52f38bcb-03f6-ef6f-dbc4-57c43d1d9e1e, 'name': SearchDatastore_Task, 'duration_secs': 0.010043} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.918755] env[63345]: DEBUG oslo_concurrency.lockutils [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1099.918755] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46/5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1099.919358] env[63345]: DEBUG oslo_vmware.api [None req-335f4f7b-16b5-4800-92af-6c82e791d2a8 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017888, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.919599] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-444eb056-c8d6-436a-b056-42e081180067 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.926451] env[63345]: DEBUG oslo_vmware.api [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 1099.926451] env[63345]: value = "task-1017889" [ 1099.926451] env[63345]: _type = "Task" [ 1099.926451] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.935869] env[63345]: DEBUG oslo_vmware.api [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017889, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.986456] env[63345]: DEBUG nova.network.neutron [req-37cf4997-a237-48f3-8287-e93d279d0eda req-0fff16ca-1b0a-4406-ad06-0ce04ce88415 service nova] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Updated VIF entry in instance network info cache for port 104a12d2-9632-4d24-a0e3-d4b18e907a58. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1099.986843] env[63345]: DEBUG nova.network.neutron [req-37cf4997-a237-48f3-8287-e93d279d0eda req-0fff16ca-1b0a-4406-ad06-0ce04ce88415 service nova] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Updating instance_info_cache with network_info: [{"id": "104a12d2-9632-4d24-a0e3-d4b18e907a58", "address": "fa:16:3e:a9:14:31", "network": {"id": "dffa0b34-9323-42eb-aeb1-e32aebcb75c8", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1826417035-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "57e386920081487583ea143003aca8c4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "94e1d797-8eb2-4400-9f7d-f2eb60eb4cf2", "external-id": "nsx-vlan-transportzone-828", "segmentation_id": 828, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap104a12d2-96", "ovs_interfaceid": "104a12d2-9632-4d24-a0e3-d4b18e907a58", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1100.101122] env[63345]: INFO nova.compute.manager [-] [instance: 95ef4f91-a618-4ae2-95ad-d027c031f239] Took 1.54 seconds to deallocate network for instance. [ 1100.102334] env[63345]: DEBUG nova.network.neutron [None req-44fe4ce0-3a77-4dd8-8b05-571bd5761283 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Updating instance_info_cache with network_info: [{"id": "5709e20d-8dfc-41ae-981d-01de437144e3", "address": "fa:16:3e:43:47:ce", "network": {"id": "d7581fd9-99cb-4847-b9da-a659a40e1d52", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1100696493-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c535ae9067ab4e8a87e95c68af4624fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f77ff7a1-209c-4f3f-b2a0-fd817741e739", "external-id": "nsx-vlan-transportzone-935", "segmentation_id": 935, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5709e20d-8d", "ovs_interfaceid": "5709e20d-8dfc-41ae-981d-01de437144e3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1100.412998] env[63345]: DEBUG oslo_vmware.api [None req-335f4f7b-16b5-4800-92af-6c82e791d2a8 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] 
Task: {'id': task-1017888, 'name': ReconfigVM_Task, 'duration_secs': 0.164845} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.413548] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-335f4f7b-16b5-4800-92af-6c82e791d2a8 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1100.413623] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-164acf88-efd5-4da9-9131-7a6dce4967e5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.424964] env[63345]: DEBUG oslo_vmware.api [None req-335f4f7b-16b5-4800-92af-6c82e791d2a8 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Waiting for the task: (returnval){ [ 1100.424964] env[63345]: value = "task-1017890" [ 1100.424964] env[63345]: _type = "Task" [ 1100.424964] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.440448] env[63345]: DEBUG oslo_vmware.api [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017889, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.443917] env[63345]: DEBUG oslo_vmware.api [None req-335f4f7b-16b5-4800-92af-6c82e791d2a8 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017890, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.473840] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1100.474135] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1100.474351] env[63345]: DEBUG nova.objects.instance [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Lazy-loading 'pci_requests' on Instance uuid 95738bee-d291-4f27-aeff-9445939bb3fa {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1100.489854] env[63345]: DEBUG oslo_concurrency.lockutils [req-37cf4997-a237-48f3-8287-e93d279d0eda req-0fff16ca-1b0a-4406-ad06-0ce04ce88415 service nova] Releasing lock "refresh_cache-5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1100.606347] env[63345]: DEBUG oslo_concurrency.lockutils [None req-44fe4ce0-3a77-4dd8-8b05-571bd5761283 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Releasing lock "refresh_cache-7245e83c-2dda-4b2f-8a65-07f7e4d6828a" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1100.610672] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fa44871e-27bc-47bd-b816-5d1226c6a854 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1100.938130] env[63345]: DEBUG oslo_vmware.api [None req-335f4f7b-16b5-4800-92af-6c82e791d2a8 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017890, 'name': PowerOnVM_Task, 'duration_secs': 0.440111} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.941241] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-335f4f7b-16b5-4800-92af-6c82e791d2a8 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1100.942912] env[63345]: DEBUG oslo_vmware.api [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017889, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.558572} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.943165] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46/5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1100.943382] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1100.944509] env[63345]: DEBUG nova.compute.manager [None req-335f4f7b-16b5-4800-92af-6c82e791d2a8 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1100.945271] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ebf08ae8-16db-4c40-ad86-ce5e98b4c50c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.947465] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c08aa360-beb3-4ccc-b407-33e7c12715ce {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.957598] env[63345]: DEBUG oslo_vmware.api [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 1100.957598] env[63345]: value = "task-1017891" [ 1100.957598] env[63345]: _type = "Task" [ 1100.957598] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.965903] env[63345]: DEBUG oslo_vmware.api [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017891, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.977983] env[63345]: DEBUG nova.objects.instance [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Lazy-loading 'numa_topology' on Instance uuid 95738bee-d291-4f27-aeff-9445939bb3fa {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1101.127620] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbc7450f-59bb-4473-9437-722086dc4dba {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.147540] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1b0507c-0f1e-4130-9b70-4e98d08fa4a8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.154956] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-44fe4ce0-3a77-4dd8-8b05-571bd5761283 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Updating instance '7245e83c-2dda-4b2f-8a65-07f7e4d6828a' progress to 83 {{(pid=63345) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1101.470391] env[63345]: DEBUG oslo_vmware.api [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017891, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067839} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.470768] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1101.471417] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44ec6368-0338-4e48-a7e2-6fd23904db2e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.485304] env[63345]: INFO nova.compute.claims [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1101.495975] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Reconfiguring VM instance instance-00000071 to attach disk [datastore2] 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46/5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1101.497032] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2363d8f7-8310-40dc-a1be-5bec51d3454f {{(pid=63345) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.518801] env[63345]: DEBUG oslo_vmware.api [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 1101.518801] env[63345]: value = "task-1017892" [ 1101.518801] env[63345]: _type = "Task" [ 1101.518801] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.529793] env[63345]: DEBUG oslo_vmware.api [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017892, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.661068] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-44fe4ce0-3a77-4dd8-8b05-571bd5761283 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1101.661404] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cc871731-8403-48b9-aa3e-c84901f82aeb {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.670203] env[63345]: DEBUG oslo_vmware.api [None req-44fe4ce0-3a77-4dd8-8b05-571bd5761283 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for the task: (returnval){ [ 1101.670203] env[63345]: value = "task-1017893" [ 1101.670203] env[63345]: _type = "Task" [ 1101.670203] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.680221] env[63345]: DEBUG oslo_vmware.api [None req-44fe4ce0-3a77-4dd8-8b05-571bd5761283 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017893, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.030397] env[63345]: DEBUG oslo_vmware.api [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017892, 'name': ReconfigVM_Task, 'duration_secs': 0.29543} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.030673] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Reconfigured VM instance instance-00000071 to attach disk [datastore2] 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46/5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1102.031338] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3ac7a56e-ab24-4546-b0f3-0e5aae742a48 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.039759] env[63345]: DEBUG oslo_vmware.api [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 1102.039759] env[63345]: value = "task-1017894" [ 1102.039759] env[63345]: _type = "Task" [ 1102.039759] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.051062] env[63345]: DEBUG oslo_vmware.api [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017894, 'name': Rename_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.184169] env[63345]: DEBUG oslo_vmware.api [None req-44fe4ce0-3a77-4dd8-8b05-571bd5761283 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017893, 'name': PowerOnVM_Task, 'duration_secs': 0.406391} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.184463] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-44fe4ce0-3a77-4dd8-8b05-571bd5761283 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1102.184705] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-44fe4ce0-3a77-4dd8-8b05-571bd5761283 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Updating instance '7245e83c-2dda-4b2f-8a65-07f7e4d6828a' progress to 100 {{(pid=63345) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1102.549468] env[63345]: DEBUG oslo_vmware.api [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017894, 'name': Rename_Task, 'duration_secs': 0.159132} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.549826] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1102.550369] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ccac526a-cf2a-4fd6-a33e-aac7bc2bd8f5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.556926] env[63345]: DEBUG oslo_vmware.api [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 1102.556926] env[63345]: value = "task-1017895" [ 1102.556926] env[63345]: _type = "Task" [ 1102.556926] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.564752] env[63345]: DEBUG oslo_vmware.api [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017895, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.610224] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-975451b6-0839-4c96-a845-88cd5cf7e887 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.618486] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b29d237f-73da-4c8a-b20b-0aa5f6d06549 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.649887] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30262779-19e6-4c74-9ef1-c0d171b4a332 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.657861] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3785d530-e320-4443-b946-7d1215344f30 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.672738] env[63345]: DEBUG nova.compute.provider_tree [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1103.067491] env[63345]: DEBUG oslo_vmware.api [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017895, 'name': PowerOnVM_Task, 'duration_secs': 0.44878} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.067801] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1103.068049] env[63345]: INFO nova.compute.manager [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Took 7.57 seconds to spawn the instance on the hypervisor. [ 1103.068247] env[63345]: DEBUG nova.compute.manager [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1103.069068] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80a8faa2-4215-4558-b93e-4cbdbc71d0f7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.176835] env[63345]: DEBUG nova.scheduler.client.report [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1103.589851] env[63345]: INFO nova.compute.manager [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Took 12.39 seconds to build instance. 
[ 1103.681815] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.208s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1103.684499] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fa44871e-27bc-47bd-b816-5d1226c6a854 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.074s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1103.684824] env[63345]: DEBUG nova.objects.instance [None req-fa44871e-27bc-47bd-b816-5d1226c6a854 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Lazy-loading 'resources' on Instance uuid 95ef4f91-a618-4ae2-95ad-d027c031f239 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1103.724318] env[63345]: INFO nova.network.neutron [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Updating port f9b10cca-c2c3-45d2-a329-61efee5dde7f with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1103.834043] env[63345]: INFO nova.compute.manager [None req-d161e50f-597a-4aab-bfef-d47bba7681fe tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] Rescuing [ 1103.834173] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d161e50f-597a-4aab-bfef-d47bba7681fe tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Acquiring lock "refresh_cache-148c961e-d260-4dbd-ad9f-52f94b072096" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1103.834335] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d161e50f-597a-4aab-bfef-d47bba7681fe tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Acquired lock "refresh_cache-148c961e-d260-4dbd-ad9f-52f94b072096" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1103.834502] env[63345]: DEBUG nova.network.neutron [None req-d161e50f-597a-4aab-bfef-d47bba7681fe tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1104.045592] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1adf44aa-d94b-4bcc-8996-15e32d1aa635 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquiring lock "7245e83c-2dda-4b2f-8a65-07f7e4d6828a" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1104.045882] env[63345]: DEBUG oslo_concurrency.lockutils [None 
req-1adf44aa-d94b-4bcc-8996-15e32d1aa635 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lock "7245e83c-2dda-4b2f-8a65-07f7e4d6828a" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1104.046557] env[63345]: DEBUG nova.compute.manager [None req-1adf44aa-d94b-4bcc-8996-15e32d1aa635 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Going to confirm migration 6 {{(pid=63345) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5112}} [ 1104.091861] env[63345]: DEBUG oslo_concurrency.lockutils [None req-29ee1038-7fca-49ae-9ad5-72eaca75d374 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lock "5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.904s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1104.303042] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4ec2072-e5e4-48ba-9407-327857127b76 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.312221] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9dc473c-09f5-425f-b87e-651b1a45ee3f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.346583] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e740e6e3-9305-4085-a7ce-fd55fc5ec4e8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.355335] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a87d54de-4635-4ff0-8499-1f58025587e3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.369021] env[63345]: DEBUG nova.compute.provider_tree [None req-fa44871e-27bc-47bd-b816-5d1226c6a854 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1104.588037] env[63345]: DEBUG nova.network.neutron [None req-d161e50f-597a-4aab-bfef-d47bba7681fe tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] Updating instance_info_cache with network_info: [{"id": "f2837ec1-0df3-454a-bc68-fb0ca9562eb4", "address": "fa:16:3e:85:d4:0e", "network": {"id": "04c13a40-3e24-45e3-b045-adb1f5b0ad03", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1754460710-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": 
"192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a7aaf150ea243b6a38a4b14f265bd4d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3753f451-fa23-4988-9361-074fb0bd3fd4", "external-id": "nsx-vlan-transportzone-440", "segmentation_id": 440, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf2837ec1-0d", "ovs_interfaceid": "f2837ec1-0df3-454a-bc68-fb0ca9562eb4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1104.731797] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1adf44aa-d94b-4bcc-8996-15e32d1aa635 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquiring lock "refresh_cache-7245e83c-2dda-4b2f-8a65-07f7e4d6828a" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1104.732295] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1adf44aa-d94b-4bcc-8996-15e32d1aa635 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquired lock "refresh_cache-7245e83c-2dda-4b2f-8a65-07f7e4d6828a" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1104.732295] env[63345]: DEBUG nova.network.neutron [None req-1adf44aa-d94b-4bcc-8996-15e32d1aa635 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1104.732466] env[63345]: DEBUG nova.objects.instance [None req-1adf44aa-d94b-4bcc-8996-15e32d1aa635 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lazy-loading 'info_cache' on Instance uuid 7245e83c-2dda-4b2f-8a65-07f7e4d6828a {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1104.872076] env[63345]: DEBUG nova.scheduler.client.report [None req-fa44871e-27bc-47bd-b816-5d1226c6a854 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1105.072073] env[63345]: DEBUG nova.compute.manager [req-70a2ef1a-1a77-4531-950a-ebbbe0b4a63a req-e9e07162-3c62-4857-b7a8-10f88e86d815 service nova] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Received event network-changed-104a12d2-9632-4d24-a0e3-d4b18e907a58 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1105.072289] env[63345]: DEBUG nova.compute.manager [req-70a2ef1a-1a77-4531-950a-ebbbe0b4a63a req-e9e07162-3c62-4857-b7a8-10f88e86d815 service nova] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Refreshing instance 
network info cache due to event network-changed-104a12d2-9632-4d24-a0e3-d4b18e907a58. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 1105.072504] env[63345]: DEBUG oslo_concurrency.lockutils [req-70a2ef1a-1a77-4531-950a-ebbbe0b4a63a req-e9e07162-3c62-4857-b7a8-10f88e86d815 service nova] Acquiring lock "refresh_cache-5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1105.072650] env[63345]: DEBUG oslo_concurrency.lockutils [req-70a2ef1a-1a77-4531-950a-ebbbe0b4a63a req-e9e07162-3c62-4857-b7a8-10f88e86d815 service nova] Acquired lock "refresh_cache-5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1105.072814] env[63345]: DEBUG nova.network.neutron [req-70a2ef1a-1a77-4531-950a-ebbbe0b4a63a req-e9e07162-3c62-4857-b7a8-10f88e86d815 service nova] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Refreshing network info cache for port 104a12d2-9632-4d24-a0e3-d4b18e907a58 {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1105.090218] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d161e50f-597a-4aab-bfef-d47bba7681fe tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Releasing lock "refresh_cache-148c961e-d260-4dbd-ad9f-52f94b072096" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1105.123382] env[63345]: DEBUG nova.compute.manager [req-f1b9bf97-dda6-4203-9afd-8bd97fa57ef6 req-bf0401f8-d134-4638-81fc-60ff36ff9da5 service nova] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Received event network-vif-plugged-f9b10cca-c2c3-45d2-a329-61efee5dde7f {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1105.123679] env[63345]: DEBUG oslo_concurrency.lockutils [req-f1b9bf97-dda6-4203-9afd-8bd97fa57ef6 req-bf0401f8-d134-4638-81fc-60ff36ff9da5 service nova] Acquiring lock "95738bee-d291-4f27-aeff-9445939bb3fa-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1105.123901] env[63345]: DEBUG oslo_concurrency.lockutils [req-f1b9bf97-dda6-4203-9afd-8bd97fa57ef6 req-bf0401f8-d134-4638-81fc-60ff36ff9da5 service nova] Lock "95738bee-d291-4f27-aeff-9445939bb3fa-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1105.124086] env[63345]: DEBUG oslo_concurrency.lockutils [req-f1b9bf97-dda6-4203-9afd-8bd97fa57ef6 req-bf0401f8-d134-4638-81fc-60ff36ff9da5 service nova] Lock "95738bee-d291-4f27-aeff-9445939bb3fa-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1105.124294] env[63345]: DEBUG nova.compute.manager [req-f1b9bf97-dda6-4203-9afd-8bd97fa57ef6 req-bf0401f8-d134-4638-81fc-60ff36ff9da5 service nova] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] No waiting events found dispatching network-vif-plugged-f9b10cca-c2c3-45d2-a329-61efee5dde7f {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1105.124422] 
env[63345]: WARNING nova.compute.manager [req-f1b9bf97-dda6-4203-9afd-8bd97fa57ef6 req-bf0401f8-d134-4638-81fc-60ff36ff9da5 service nova] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Received unexpected event network-vif-plugged-f9b10cca-c2c3-45d2-a329-61efee5dde7f for instance with vm_state shelved_offloaded and task_state spawning. [ 1105.234181] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Acquiring lock "refresh_cache-95738bee-d291-4f27-aeff-9445939bb3fa" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1105.234427] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Acquired lock "refresh_cache-95738bee-d291-4f27-aeff-9445939bb3fa" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1105.234618] env[63345]: DEBUG nova.network.neutron [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1105.379413] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fa44871e-27bc-47bd-b816-5d1226c6a854 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.695s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1105.401503] env[63345]: INFO nova.scheduler.client.report [None req-fa44871e-27bc-47bd-b816-5d1226c6a854 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Deleted allocations for instance 95ef4f91-a618-4ae2-95ad-d027c031f239 [ 1105.908280] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fa44871e-27bc-47bd-b816-5d1226c6a854 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Lock "95ef4f91-a618-4ae2-95ad-d027c031f239" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.002s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1105.922523] env[63345]: DEBUG nova.network.neutron [req-70a2ef1a-1a77-4531-950a-ebbbe0b4a63a req-e9e07162-3c62-4857-b7a8-10f88e86d815 service nova] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Updated VIF entry in instance network info cache for port 104a12d2-9632-4d24-a0e3-d4b18e907a58. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1105.923182] env[63345]: DEBUG nova.network.neutron [req-70a2ef1a-1a77-4531-950a-ebbbe0b4a63a req-e9e07162-3c62-4857-b7a8-10f88e86d815 service nova] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Updating instance_info_cache with network_info: [{"id": "104a12d2-9632-4d24-a0e3-d4b18e907a58", "address": "fa:16:3e:a9:14:31", "network": {"id": "dffa0b34-9323-42eb-aeb1-e32aebcb75c8", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1826417035-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.227", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "57e386920081487583ea143003aca8c4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "94e1d797-8eb2-4400-9f7d-f2eb60eb4cf2", "external-id": "nsx-vlan-transportzone-828", "segmentation_id": 828, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap104a12d2-96", "ovs_interfaceid": "104a12d2-9632-4d24-a0e3-d4b18e907a58", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1105.973749] env[63345]: DEBUG nova.network.neutron [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Updating instance_info_cache with network_info: [{"id": "f9b10cca-c2c3-45d2-a329-61efee5dde7f", "address": "fa:16:3e:31:f2:fb", "network": {"id": "95d95c9b-b21c-4ee5-ab54-d0bf2699d38e", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-88421441-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba08f64c26d245a8b8f2b52ea97c2f1a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7043ca7a-807c-4c7b-b646-23ffece188b2", "external-id": "nsx-vlan-transportzone-619", "segmentation_id": 619, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf9b10cca-c2", "ovs_interfaceid": "f9b10cca-c2c3-45d2-a329-61efee5dde7f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1106.029570] env[63345]: DEBUG nova.network.neutron [None req-1adf44aa-d94b-4bcc-8996-15e32d1aa635 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] 
[instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Updating instance_info_cache with network_info: [{"id": "5709e20d-8dfc-41ae-981d-01de437144e3", "address": "fa:16:3e:43:47:ce", "network": {"id": "d7581fd9-99cb-4847-b9da-a659a40e1d52", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1100696493-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c535ae9067ab4e8a87e95c68af4624fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f77ff7a1-209c-4f3f-b2a0-fd817741e739", "external-id": "nsx-vlan-transportzone-935", "segmentation_id": 935, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5709e20d-8d", "ovs_interfaceid": "5709e20d-8dfc-41ae-981d-01de437144e3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1106.123051] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-d161e50f-597a-4aab-bfef-d47bba7681fe tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1106.123393] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b8a8f450-a471-4530-863b-0d5b07fb1e66 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.132105] env[63345]: DEBUG oslo_vmware.api [None req-d161e50f-597a-4aab-bfef-d47bba7681fe tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Waiting for the task: (returnval){ [ 1106.132105] env[63345]: value = "task-1017896" [ 1106.132105] env[63345]: _type = "Task" [ 1106.132105] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.141266] env[63345]: DEBUG oslo_vmware.api [None req-d161e50f-597a-4aab-bfef-d47bba7681fe tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017896, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.425668] env[63345]: DEBUG oslo_concurrency.lockutils [req-70a2ef1a-1a77-4531-950a-ebbbe0b4a63a req-e9e07162-3c62-4857-b7a8-10f88e86d815 service nova] Releasing lock "refresh_cache-5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1106.478891] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Releasing lock "refresh_cache-95738bee-d291-4f27-aeff-9445939bb3fa" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1106.508926] env[63345]: DEBUG nova.virt.hardware [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='8940cdc2c465d7d095dfc23d52b257ff',container_format='bare',created_at=2024-09-30T09:43:41Z,direct_url=,disk_format='vmdk',id=00f54121-1c47-489a-9345-a57300eace29,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-1155927035-shelved',owner='ba08f64c26d245a8b8f2b52ea97c2f1a',properties=ImageMetaProps,protected=,size=31665664,status='active',tags=,updated_at=2024-09-30T09:43:56Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1106.509313] env[63345]: DEBUG nova.virt.hardware [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1106.509541] env[63345]: DEBUG nova.virt.hardware [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1106.509807] env[63345]: DEBUG nova.virt.hardware [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1106.510034] env[63345]: DEBUG nova.virt.hardware [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1106.510259] env[63345]: DEBUG nova.virt.hardware [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 1106.510560] env[63345]: DEBUG nova.virt.hardware [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1106.510839] env[63345]: DEBUG nova.virt.hardware [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1106.511151] env[63345]: DEBUG nova.virt.hardware [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1106.511415] env[63345]: DEBUG nova.virt.hardware [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1106.511669] env[63345]: DEBUG nova.virt.hardware [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1106.512870] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59cc02b9-2ea3-4f16-a35b-d91d7480802c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.521756] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b484371-60a4-43db-bc9e-86dfb8a231ae {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.539508] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1adf44aa-d94b-4bcc-8996-15e32d1aa635 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Releasing lock "refresh_cache-7245e83c-2dda-4b2f-8a65-07f7e4d6828a" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1106.539833] env[63345]: DEBUG nova.objects.instance [None req-1adf44aa-d94b-4bcc-8996-15e32d1aa635 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lazy-loading 'migration_context' on Instance uuid 7245e83c-2dda-4b2f-8a65-07f7e4d6828a {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1106.544652] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:31:f2:fb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'7043ca7a-807c-4c7b-b646-23ffece188b2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f9b10cca-c2c3-45d2-a329-61efee5dde7f', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1106.549835] env[63345]: DEBUG oslo.service.loopingcall [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1106.550637] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1106.550878] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3169e4b4-5bc0-4fa7-a5d6-ef3d63a629d3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.574392] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1106.574392] env[63345]: value = "task-1017897" [ 1106.574392] env[63345]: _type = "Task" [ 1106.574392] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.584163] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017897, 'name': CreateVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.642938] env[63345]: DEBUG oslo_vmware.api [None req-d161e50f-597a-4aab-bfef-d47bba7681fe tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017896, 'name': PowerOffVM_Task, 'duration_secs': 0.459302} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.643276] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-d161e50f-597a-4aab-bfef-d47bba7681fe tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1106.644168] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69d9d9ac-d034-41f3-8f30-4a8f1851af96 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.662832] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0ecede1-5a97-44f0-b53d-8b335971488e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.694775] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-d161e50f-597a-4aab-bfef-d47bba7681fe tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1106.695376] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2c122911-3252-4307-8584-a16b4ebacb52 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.703490] env[63345]: DEBUG oslo_vmware.api [None req-d161e50f-597a-4aab-bfef-d47bba7681fe tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Waiting for the task: (returnval){ [ 1106.703490] env[63345]: value = "task-1017898" [ 1106.703490] env[63345]: _type = "Task" [ 1106.703490] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.711685] env[63345]: DEBUG oslo_vmware.api [None req-d161e50f-597a-4aab-bfef-d47bba7681fe tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017898, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.051932] env[63345]: DEBUG nova.objects.base [None req-1adf44aa-d94b-4bcc-8996-15e32d1aa635 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Object Instance<7245e83c-2dda-4b2f-8a65-07f7e4d6828a> lazy-loaded attributes: info_cache,migration_context {{(pid=63345) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 1107.052979] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4180cea-c402-4152-bcc3-c5e76289f0e9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.080226] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e2946908-4b3a-47e5-8644-323c7015daaf {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.086950] env[63345]: DEBUG oslo_vmware.api [None req-1adf44aa-d94b-4bcc-8996-15e32d1aa635 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for the task: (returnval){ [ 1107.086950] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52cc5947-52f2-4ff0-db8f-9ff225187a34" [ 1107.086950] env[63345]: _type = "Task" [ 1107.086950] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.090563] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017897, 'name': CreateVM_Task} progress is 25%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.100408] env[63345]: DEBUG oslo_vmware.api [None req-1adf44aa-d94b-4bcc-8996-15e32d1aa635 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52cc5947-52f2-4ff0-db8f-9ff225187a34, 'name': SearchDatastore_Task, 'duration_secs': 0.011517} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.100827] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1adf44aa-d94b-4bcc-8996-15e32d1aa635 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1107.100973] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1adf44aa-d94b-4bcc-8996-15e32d1aa635 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1107.155278] env[63345]: DEBUG nova.compute.manager [req-7a056fe5-ca84-4f5d-9770-aa42a4aac5f1 req-29497a96-944f-4e1d-8c5e-4d130cef08aa service nova] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Received event network-changed-f9b10cca-c2c3-45d2-a329-61efee5dde7f {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1107.155871] env[63345]: DEBUG nova.compute.manager [req-7a056fe5-ca84-4f5d-9770-aa42a4aac5f1 req-29497a96-944f-4e1d-8c5e-4d130cef08aa service nova] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Refreshing instance network info cache due to event network-changed-f9b10cca-c2c3-45d2-a329-61efee5dde7f. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 1107.156708] env[63345]: DEBUG oslo_concurrency.lockutils [req-7a056fe5-ca84-4f5d-9770-aa42a4aac5f1 req-29497a96-944f-4e1d-8c5e-4d130cef08aa service nova] Acquiring lock "refresh_cache-95738bee-d291-4f27-aeff-9445939bb3fa" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1107.157060] env[63345]: DEBUG oslo_concurrency.lockutils [req-7a056fe5-ca84-4f5d-9770-aa42a4aac5f1 req-29497a96-944f-4e1d-8c5e-4d130cef08aa service nova] Acquired lock "refresh_cache-95738bee-d291-4f27-aeff-9445939bb3fa" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1107.157394] env[63345]: DEBUG nova.network.neutron [req-7a056fe5-ca84-4f5d-9770-aa42a4aac5f1 req-29497a96-944f-4e1d-8c5e-4d130cef08aa service nova] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Refreshing network info cache for port f9b10cca-c2c3-45d2-a329-61efee5dde7f {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1107.214271] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-d161e50f-597a-4aab-bfef-d47bba7681fe tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] VM already powered off {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 1107.214503] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-d161e50f-597a-4aab-bfef-d47bba7681fe tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1107.214817] env[63345]: DEBUG 
oslo_concurrency.lockutils [None req-d161e50f-597a-4aab-bfef-d47bba7681fe tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1107.214958] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d161e50f-597a-4aab-bfef-d47bba7681fe tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1107.215183] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-d161e50f-597a-4aab-bfef-d47bba7681fe tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1107.215419] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-57937143-0c02-44ea-a07c-7e926cf1ecaa {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.226686] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-d161e50f-597a-4aab-bfef-d47bba7681fe tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1107.226686] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-d161e50f-597a-4aab-bfef-d47bba7681fe tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1107.227479] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3261fbaa-de6a-40f0-ba2a-fba85f1c20e9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.234061] env[63345]: DEBUG oslo_vmware.api [None req-d161e50f-597a-4aab-bfef-d47bba7681fe tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Waiting for the task: (returnval){ [ 1107.234061] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]5254c5ca-d42a-3bf0-6bca-aee3fc2cb4cf" [ 1107.234061] env[63345]: _type = "Task" [ 1107.234061] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.244462] env[63345]: DEBUG oslo_vmware.api [None req-d161e50f-597a-4aab-bfef-d47bba7681fe tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5254c5ca-d42a-3bf0-6bca-aee3fc2cb4cf, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.585949] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017897, 'name': CreateVM_Task} progress is 99%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.705383] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d6135ad-eb7b-4ffc-82c8-041fb85be34f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.713867] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-326ccfa6-dac6-4c74-b23e-3c6fceaba304 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.748981] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1997fddd-6f3a-47e1-89d0-aeb3f2451026 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.759471] env[63345]: DEBUG oslo_vmware.api [None req-d161e50f-597a-4aab-bfef-d47bba7681fe tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5254c5ca-d42a-3bf0-6bca-aee3fc2cb4cf, 'name': SearchDatastore_Task, 'duration_secs': 0.014803} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.761150] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f977a789-9814-4fec-bffd-a2a780705b56 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.764823] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-14c54251-ae31-4643-b349-e2de26373db5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.770549] env[63345]: DEBUG oslo_vmware.api [None req-d161e50f-597a-4aab-bfef-d47bba7681fe tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Waiting for the task: (returnval){ [ 1107.770549] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52f16b60-0274-18aa-853f-cd12c9c2102a" [ 1107.770549] env[63345]: _type = "Task" [ 1107.770549] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.778426] env[63345]: DEBUG nova.compute.provider_tree [None req-1adf44aa-d94b-4bcc-8996-15e32d1aa635 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1107.788294] env[63345]: DEBUG oslo_vmware.api [None req-d161e50f-597a-4aab-bfef-d47bba7681fe tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52f16b60-0274-18aa-853f-cd12c9c2102a, 'name': SearchDatastore_Task, 'duration_secs': 0.0101} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.789119] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d161e50f-597a-4aab-bfef-d47bba7681fe tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1107.789385] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-d161e50f-597a-4aab-bfef-d47bba7681fe tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 148c961e-d260-4dbd-ad9f-52f94b072096/2ff49e1b-8f44-4332-bba9-777d55ff62c4-rescue.vmdk. {{(pid=63345) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1107.789636] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9dc91b4d-e3fe-4d58-9cef-b51100c47689 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.798154] env[63345]: DEBUG oslo_vmware.api [None req-d161e50f-597a-4aab-bfef-d47bba7681fe tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Waiting for the task: (returnval){ [ 1107.798154] env[63345]: value = "task-1017899" [ 1107.798154] env[63345]: _type = "Task" [ 1107.798154] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.808495] env[63345]: DEBUG oslo_vmware.api [None req-d161e50f-597a-4aab-bfef-d47bba7681fe tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017899, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.004101] env[63345]: DEBUG nova.network.neutron [req-7a056fe5-ca84-4f5d-9770-aa42a4aac5f1 req-29497a96-944f-4e1d-8c5e-4d130cef08aa service nova] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Updated VIF entry in instance network info cache for port f9b10cca-c2c3-45d2-a329-61efee5dde7f. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1108.004491] env[63345]: DEBUG nova.network.neutron [req-7a056fe5-ca84-4f5d-9770-aa42a4aac5f1 req-29497a96-944f-4e1d-8c5e-4d130cef08aa service nova] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Updating instance_info_cache with network_info: [{"id": "f9b10cca-c2c3-45d2-a329-61efee5dde7f", "address": "fa:16:3e:31:f2:fb", "network": {"id": "95d95c9b-b21c-4ee5-ab54-d0bf2699d38e", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-88421441-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba08f64c26d245a8b8f2b52ea97c2f1a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7043ca7a-807c-4c7b-b646-23ffece188b2", "external-id": "nsx-vlan-transportzone-619", "segmentation_id": 619, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf9b10cca-c2", "ovs_interfaceid": "f9b10cca-c2c3-45d2-a329-61efee5dde7f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1108.087060] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017897, 'name': CreateVM_Task} progress is 99%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.284610] env[63345]: DEBUG nova.scheduler.client.report [None req-1adf44aa-d94b-4bcc-8996-15e32d1aa635 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1108.310975] env[63345]: DEBUG oslo_vmware.api [None req-d161e50f-597a-4aab-bfef-d47bba7681fe tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017899, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.507340] env[63345]: DEBUG oslo_concurrency.lockutils [req-7a056fe5-ca84-4f5d-9770-aa42a4aac5f1 req-29497a96-944f-4e1d-8c5e-4d130cef08aa service nova] Releasing lock "refresh_cache-95738bee-d291-4f27-aeff-9445939bb3fa" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1108.587276] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017897, 'name': CreateVM_Task, 'duration_secs': 1.637308} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.587448] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1108.588161] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/00f54121-1c47-489a-9345-a57300eace29" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1108.588336] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Acquired lock "[datastore2] devstack-image-cache_base/00f54121-1c47-489a-9345-a57300eace29" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1108.588734] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/00f54121-1c47-489a-9345-a57300eace29" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1108.589008] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f0252105-9f1f-4521-a8c8-3b4a2f446e32 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.593911] env[63345]: DEBUG oslo_vmware.api [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Waiting for the task: (returnval){ [ 1108.593911] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]524515b8-cb59-d8ab-a00e-c73f0d5a3b36" [ 1108.593911] env[63345]: _type = "Task" [ 1108.593911] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.602156] env[63345]: DEBUG oslo_vmware.api [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]524515b8-cb59-d8ab-a00e-c73f0d5a3b36, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.693173] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Acquiring lock "735c5f4f-98c1-4c75-bb82-66e49b0233f6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1108.693768] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Lock "735c5f4f-98c1-4c75-bb82-66e49b0233f6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1108.810734] env[63345]: DEBUG oslo_vmware.api [None req-d161e50f-597a-4aab-bfef-d47bba7681fe tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017899, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.683409} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.811080] env[63345]: INFO nova.virt.vmwareapi.ds_util [None req-d161e50f-597a-4aab-bfef-d47bba7681fe tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 148c961e-d260-4dbd-ad9f-52f94b072096/2ff49e1b-8f44-4332-bba9-777d55ff62c4-rescue.vmdk. [ 1108.811778] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d969d043-ef18-4767-a24a-e8dc03fc0a62 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.837391] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-d161e50f-597a-4aab-bfef-d47bba7681fe tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] Reconfiguring VM instance instance-0000006f to attach disk [datastore2] 148c961e-d260-4dbd-ad9f-52f94b072096/2ff49e1b-8f44-4332-bba9-777d55ff62c4-rescue.vmdk or device None with type thin {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1108.837706] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bda51253-fdb1-436f-bdbe-1b4378f1d475 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.856695] env[63345]: DEBUG oslo_vmware.api [None req-d161e50f-597a-4aab-bfef-d47bba7681fe tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Waiting for the task: (returnval){ [ 1108.856695] env[63345]: value = "task-1017900" [ 1108.856695] env[63345]: _type = "Task" [ 1108.856695] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.865288] env[63345]: DEBUG oslo_vmware.api [None req-d161e50f-597a-4aab-bfef-d47bba7681fe tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017900, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.104458] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Releasing lock "[datastore2] devstack-image-cache_base/00f54121-1c47-489a-9345-a57300eace29" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1109.104831] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Processing image 00f54121-1c47-489a-9345-a57300eace29 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1109.104941] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/00f54121-1c47-489a-9345-a57300eace29/00f54121-1c47-489a-9345-a57300eace29.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1109.105108] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Acquired lock "[datastore2] devstack-image-cache_base/00f54121-1c47-489a-9345-a57300eace29/00f54121-1c47-489a-9345-a57300eace29.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1109.105300] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1109.105548] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-58ae526c-7d84-407d-b52c-b0a3f084eeaf {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.114775] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1109.114977] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1109.115693] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7f9cc044-3545-4ca7-ab15-8a9e3ea274d3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.123454] env[63345]: DEBUG oslo_vmware.api [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Waiting for the task: (returnval){ [ 1109.123454] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52ba7682-e348-d3aa-12f8-a1bdbe191188" [ 1109.123454] env[63345]: _type = "Task" [ 1109.123454] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.131093] env[63345]: DEBUG oslo_vmware.api [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52ba7682-e348-d3aa-12f8-a1bdbe191188, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.196854] env[63345]: DEBUG nova.compute.manager [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 735c5f4f-98c1-4c75-bb82-66e49b0233f6] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 1109.295611] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1adf44aa-d94b-4bcc-8996-15e32d1aa635 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.194s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1109.367192] env[63345]: DEBUG oslo_vmware.api [None req-d161e50f-597a-4aab-bfef-d47bba7681fe tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017900, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.634665] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Preparing fetch location {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1109.634932] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Fetch image to [datastore2] OSTACK_IMG_bbfca8e0-768b-4f0e-aca9-6ba33d5487c8/OSTACK_IMG_bbfca8e0-768b-4f0e-aca9-6ba33d5487c8.vmdk {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1109.635153] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Downloading stream optimized image 00f54121-1c47-489a-9345-a57300eace29 to [datastore2] OSTACK_IMG_bbfca8e0-768b-4f0e-aca9-6ba33d5487c8/OSTACK_IMG_bbfca8e0-768b-4f0e-aca9-6ba33d5487c8.vmdk on the data store datastore2 as vApp {{(pid=63345) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1109.635339] env[63345]: DEBUG nova.virt.vmwareapi.images [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Downloading image file data 00f54121-1c47-489a-9345-a57300eace29 to the ESX as VM named 'OSTACK_IMG_bbfca8e0-768b-4f0e-aca9-6ba33d5487c8' {{(pid=63345) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1109.706081] env[63345]: DEBUG oslo_vmware.rw_handles [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1109.706081] env[63345]: value = "resgroup-9" [ 1109.706081] env[63345]: _type = "ResourcePool" [ 1109.706081] env[63345]: }. 
{{(pid=63345) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1109.707245] env[63345]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-1ed652f9-5622-412e-913c-1ad47b14876c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.723510] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1109.723763] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1109.725364] env[63345]: INFO nova.compute.claims [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 735c5f4f-98c1-4c75-bb82-66e49b0233f6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1109.733727] env[63345]: DEBUG oslo_vmware.rw_handles [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Lease: (returnval){ [ 1109.733727] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52d6153d-15dd-7dbe-975a-e02fc62845d2" [ 1109.733727] env[63345]: _type = "HttpNfcLease" [ 1109.733727] env[63345]: } obtained for vApp import into resource pool (val){ [ 1109.733727] env[63345]: value = "resgroup-9" [ 1109.733727] env[63345]: _type = "ResourcePool" [ 1109.733727] env[63345]: }. {{(pid=63345) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1109.734032] env[63345]: DEBUG oslo_vmware.api [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Waiting for the lease: (returnval){ [ 1109.734032] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52d6153d-15dd-7dbe-975a-e02fc62845d2" [ 1109.734032] env[63345]: _type = "HttpNfcLease" [ 1109.734032] env[63345]: } to be ready. {{(pid=63345) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1109.741017] env[63345]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1109.741017] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52d6153d-15dd-7dbe-975a-e02fc62845d2" [ 1109.741017] env[63345]: _type = "HttpNfcLease" [ 1109.741017] env[63345]: } is initializing. 
{{(pid=63345) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1109.855536] env[63345]: INFO nova.scheduler.client.report [None req-1adf44aa-d94b-4bcc-8996-15e32d1aa635 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Deleted allocation for migration 338a27f0-4a04-4b10-9b24-9ad29ee1e81a [ 1109.870124] env[63345]: DEBUG oslo_vmware.api [None req-d161e50f-597a-4aab-bfef-d47bba7681fe tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017900, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.242557] env[63345]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1110.242557] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52d6153d-15dd-7dbe-975a-e02fc62845d2" [ 1110.242557] env[63345]: _type = "HttpNfcLease" [ 1110.242557] env[63345]: } is initializing. {{(pid=63345) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1110.364928] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1adf44aa-d94b-4bcc-8996-15e32d1aa635 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lock "7245e83c-2dda-4b2f-8a65-07f7e4d6828a" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.319s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1110.370020] env[63345]: DEBUG oslo_vmware.api [None req-d161e50f-597a-4aab-bfef-d47bba7681fe tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017900, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.744790] env[63345]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1110.744790] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52d6153d-15dd-7dbe-975a-e02fc62845d2" [ 1110.744790] env[63345]: _type = "HttpNfcLease" [ 1110.744790] env[63345]: } is ready. {{(pid=63345) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1110.745210] env[63345]: DEBUG oslo_vmware.rw_handles [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1110.745210] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52d6153d-15dd-7dbe-975a-e02fc62845d2" [ 1110.745210] env[63345]: _type = "HttpNfcLease" [ 1110.745210] env[63345]: }. {{(pid=63345) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1110.746070] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fdfd680-6668-42d8-8fd3-76d652b9fa87 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.755573] env[63345]: DEBUG oslo_vmware.rw_handles [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52829b54-75e4-a2d2-5258-0c2a7847fecd/disk-0.vmdk from lease info. 
{{(pid=63345) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1110.755782] env[63345]: DEBUG oslo_vmware.rw_handles [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Creating HTTP connection to write to file with size = 31665664 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52829b54-75e4-a2d2-5258-0c2a7847fecd/disk-0.vmdk. {{(pid=63345) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1110.822233] env[63345]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-b2b57205-f7e0-455e-a067-b4d84ca03017 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.874480] env[63345]: DEBUG oslo_vmware.api [None req-d161e50f-597a-4aab-bfef-d47bba7681fe tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017900, 'name': ReconfigVM_Task, 'duration_secs': 1.584525} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1110.877318] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-d161e50f-597a-4aab-bfef-d47bba7681fe tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] Reconfigured VM instance instance-0000006f to attach disk [datastore2] 148c961e-d260-4dbd-ad9f-52f94b072096/2ff49e1b-8f44-4332-bba9-777d55ff62c4-rescue.vmdk or device None with type thin {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1110.878437] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53d09c4a-fafa-42fe-8b4a-dacf06a5b496 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.905326] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7d5a06eb-4195-437e-809d-6022b1837f94 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.916053] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24fba83b-8475-4322-80d3-a39c83cc2a0a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.927110] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea3d8e40-c0ff-4834-80d9-a1fbf2bc2021 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.930764] env[63345]: DEBUG oslo_vmware.api [None req-d161e50f-597a-4aab-bfef-d47bba7681fe tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Waiting for the task: (returnval){ [ 1110.930764] env[63345]: value = "task-1017902" [ 1110.930764] env[63345]: _type = "Task" [ 1110.930764] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.965866] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23783c04-7c91-4705-b774-ead0b0bfdc47 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.968908] env[63345]: DEBUG oslo_vmware.api [None req-d161e50f-597a-4aab-bfef-d47bba7681fe tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017902, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.977814] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a118cd7-803f-43c5-9e7e-c742245d4aaf {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.994284] env[63345]: DEBUG nova.compute.provider_tree [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1111.442451] env[63345]: DEBUG oslo_vmware.api [None req-d161e50f-597a-4aab-bfef-d47bba7681fe tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017902, 'name': ReconfigVM_Task, 'duration_secs': 0.202758} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.442836] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-d161e50f-597a-4aab-bfef-d47bba7681fe tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1111.442926] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3893686d-8d36-4aa4-b9dc-328be1960075 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.452433] env[63345]: DEBUG oslo_vmware.api [None req-d161e50f-597a-4aab-bfef-d47bba7681fe tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Waiting for the task: (returnval){ [ 1111.452433] env[63345]: value = "task-1017903" [ 1111.452433] env[63345]: _type = "Task" [ 1111.452433] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.463823] env[63345]: DEBUG oslo_vmware.api [None req-d161e50f-597a-4aab-bfef-d47bba7681fe tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017903, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.521079] env[63345]: ERROR nova.scheduler.client.report [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [req-d80aab80-ebef-4588-ac36-9a9cc5d68106] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID fc35ddde-c15e-4ab8-bf77-a06ae0805b57. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-d80aab80-ebef-4588-ac36-9a9cc5d68106"}]} [ 1111.538425] env[63345]: DEBUG nova.scheduler.client.report [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Refreshing inventories for resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1111.555803] env[63345]: DEBUG nova.scheduler.client.report [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Updating ProviderTree inventory for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1111.556085] env[63345]: DEBUG nova.compute.provider_tree [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1111.569581] env[63345]: DEBUG nova.scheduler.client.report [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 
tempest-AttachVolumeNegativeTest-873190635-project-member] Refreshing aggregate associations for resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57, aggregates: None {{(pid=63345) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1111.572550] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1adf44aa-d94b-4bcc-8996-15e32d1aa635 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquiring lock "7245e83c-2dda-4b2f-8a65-07f7e4d6828a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1111.572654] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1adf44aa-d94b-4bcc-8996-15e32d1aa635 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lock "7245e83c-2dda-4b2f-8a65-07f7e4d6828a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1111.572905] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1adf44aa-d94b-4bcc-8996-15e32d1aa635 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquiring lock "7245e83c-2dda-4b2f-8a65-07f7e4d6828a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1111.573143] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1adf44aa-d94b-4bcc-8996-15e32d1aa635 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lock "7245e83c-2dda-4b2f-8a65-07f7e4d6828a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1111.573372] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1adf44aa-d94b-4bcc-8996-15e32d1aa635 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lock "7245e83c-2dda-4b2f-8a65-07f7e4d6828a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1111.577143] env[63345]: INFO nova.compute.manager [None req-1adf44aa-d94b-4bcc-8996-15e32d1aa635 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Terminating instance [ 1111.593067] env[63345]: DEBUG nova.scheduler.client.report [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Refreshing trait associations for resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=63345) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1111.708346] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22f18ab5-d83b-4e14-9535-0e2c05c78a37 {{(pid=63345) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.719826] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7587ea15-8d52-4c49-b229-012bb66a42fa {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.760657] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58b17099-a81e-4fa5-8ede-3898bfeebc08 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.769711] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f161918-c842-46a0-9431-bed6ec33b941 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.787652] env[63345]: DEBUG nova.compute.provider_tree [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1111.954753] env[63345]: DEBUG oslo_vmware.rw_handles [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Completed reading data from the image iterator. {{(pid=63345) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1111.954999] env[63345]: DEBUG oslo_vmware.rw_handles [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52829b54-75e4-a2d2-5258-0c2a7847fecd/disk-0.vmdk. {{(pid=63345) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1111.959653] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edbf305d-8758-423a-bad7-07a06759c648 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.967085] env[63345]: DEBUG oslo_vmware.rw_handles [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52829b54-75e4-a2d2-5258-0c2a7847fecd/disk-0.vmdk is in state: ready. {{(pid=63345) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1111.967298] env[63345]: DEBUG oslo_vmware.rw_handles [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52829b54-75e4-a2d2-5258-0c2a7847fecd/disk-0.vmdk. 
{{(pid=63345) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1111.970391] env[63345]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-74a065ff-4b04-460f-a348-56e6679cbe2c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.971951] env[63345]: DEBUG oslo_vmware.api [None req-d161e50f-597a-4aab-bfef-d47bba7681fe tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017903, 'name': PowerOnVM_Task, 'duration_secs': 0.431696} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.972269] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-d161e50f-597a-4aab-bfef-d47bba7681fe tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1111.975066] env[63345]: DEBUG nova.compute.manager [None req-d161e50f-597a-4aab-bfef-d47bba7681fe tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1111.975866] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27abcd94-3d16-4dbe-ab12-e50efd219934 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.081314] env[63345]: DEBUG nova.compute.manager [None req-1adf44aa-d94b-4bcc-8996-15e32d1aa635 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Start destroying the instance on the hypervisor. 
{{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 1112.081500] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-1adf44aa-d94b-4bcc-8996-15e32d1aa635 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1112.082528] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5c78cab-0dc1-4743-a2d4-06214779668a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.091885] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-1adf44aa-d94b-4bcc-8996-15e32d1aa635 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1112.092181] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-11147b89-c13c-4be4-8a5c-d0d89c9a5f11 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.100210] env[63345]: DEBUG oslo_vmware.api [None req-1adf44aa-d94b-4bcc-8996-15e32d1aa635 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for the task: (returnval){ [ 1112.100210] env[63345]: value = "task-1017904" [ 1112.100210] env[63345]: _type = "Task" [ 1112.100210] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.110022] env[63345]: DEBUG oslo_vmware.api [None req-1adf44aa-d94b-4bcc-8996-15e32d1aa635 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017904, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.156060] env[63345]: DEBUG oslo_vmware.rw_handles [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52829b54-75e4-a2d2-5258-0c2a7847fecd/disk-0.vmdk. 
{{(pid=63345) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1112.156060] env[63345]: INFO nova.virt.vmwareapi.images [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Downloaded image file data 00f54121-1c47-489a-9345-a57300eace29 [ 1112.156890] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70628826-e0ec-4018-9e42-f22b140e94f3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.173199] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c51f7a2c-e5ae-4637-be62-4b6367483b8b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.203847] env[63345]: INFO nova.virt.vmwareapi.images [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] The imported VM was unregistered [ 1112.206597] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Caching image {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1112.206811] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Creating directory with path [datastore2] devstack-image-cache_base/00f54121-1c47-489a-9345-a57300eace29 {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1112.207124] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d015a1ad-b464-4848-a57c-04e8bf95b91e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.217909] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Created directory with path [datastore2] devstack-image-cache_base/00f54121-1c47-489a-9345-a57300eace29 {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1112.218110] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_bbfca8e0-768b-4f0e-aca9-6ba33d5487c8/OSTACK_IMG_bbfca8e0-768b-4f0e-aca9-6ba33d5487c8.vmdk to [datastore2] devstack-image-cache_base/00f54121-1c47-489a-9345-a57300eace29/00f54121-1c47-489a-9345-a57300eace29.vmdk. 
{{(pid=63345) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1112.218374] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-40a6d0fb-9ebf-4d2b-963f-de04a2762504 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.225961] env[63345]: DEBUG oslo_vmware.api [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Waiting for the task: (returnval){ [ 1112.225961] env[63345]: value = "task-1017906" [ 1112.225961] env[63345]: _type = "Task" [ 1112.225961] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.233990] env[63345]: DEBUG oslo_vmware.api [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017906, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.321583] env[63345]: DEBUG nova.scheduler.client.report [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Updated inventory for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with generation 163 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1112.321931] env[63345]: DEBUG nova.compute.provider_tree [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Updating resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 generation from 163 to 164 during operation: update_inventory {{(pid=63345) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1112.322146] env[63345]: DEBUG nova.compute.provider_tree [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1112.611046] env[63345]: DEBUG oslo_vmware.api [None req-1adf44aa-d94b-4bcc-8996-15e32d1aa635 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017904, 'name': PowerOffVM_Task, 'duration_secs': 0.355027} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.611421] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-1adf44aa-d94b-4bcc-8996-15e32d1aa635 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1112.611603] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-1adf44aa-d94b-4bcc-8996-15e32d1aa635 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1112.611821] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-88f97804-1f1e-4f69-b25f-2b85ee583bdc {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.724326] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-1adf44aa-d94b-4bcc-8996-15e32d1aa635 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1112.724540] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-1adf44aa-d94b-4bcc-8996-15e32d1aa635 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1112.724865] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-1adf44aa-d94b-4bcc-8996-15e32d1aa635 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Deleting the datastore file [datastore2] 7245e83c-2dda-4b2f-8a65-07f7e4d6828a {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1112.725220] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-11ef5562-9616-459e-9a36-a4309ee30047 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.740746] env[63345]: DEBUG oslo_vmware.api [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017906, 'name': MoveVirtualDisk_Task} progress is 21%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.742410] env[63345]: DEBUG oslo_vmware.api [None req-1adf44aa-d94b-4bcc-8996-15e32d1aa635 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for the task: (returnval){ [ 1112.742410] env[63345]: value = "task-1017908" [ 1112.742410] env[63345]: _type = "Task" [ 1112.742410] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.751732] env[63345]: DEBUG oslo_vmware.api [None req-1adf44aa-d94b-4bcc-8996-15e32d1aa635 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017908, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.827707] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.104s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1112.828333] env[63345]: DEBUG nova.compute.manager [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 735c5f4f-98c1-4c75-bb82-66e49b0233f6] Start building networks asynchronously for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 1113.239826] env[63345]: DEBUG oslo_vmware.api [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017906, 'name': MoveVirtualDisk_Task} progress is 43%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.252243] env[63345]: DEBUG oslo_vmware.api [None req-1adf44aa-d94b-4bcc-8996-15e32d1aa635 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017908, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.334061] env[63345]: DEBUG nova.compute.utils [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1113.335877] env[63345]: DEBUG nova.compute.manager [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 735c5f4f-98c1-4c75-bb82-66e49b0233f6] Allocating IP information in the background. 
{{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1113.336144] env[63345]: DEBUG nova.network.neutron [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 735c5f4f-98c1-4c75-bb82-66e49b0233f6] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1113.393059] env[63345]: INFO nova.compute.manager [None req-73a22794-c26a-4108-8cee-c364f40044da tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] Unrescuing [ 1113.393059] env[63345]: DEBUG oslo_concurrency.lockutils [None req-73a22794-c26a-4108-8cee-c364f40044da tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Acquiring lock "refresh_cache-148c961e-d260-4dbd-ad9f-52f94b072096" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1113.393298] env[63345]: DEBUG oslo_concurrency.lockutils [None req-73a22794-c26a-4108-8cee-c364f40044da tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Acquired lock "refresh_cache-148c961e-d260-4dbd-ad9f-52f94b072096" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1113.393298] env[63345]: DEBUG nova.network.neutron [None req-73a22794-c26a-4108-8cee-c364f40044da tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1113.449985] env[63345]: DEBUG nova.policy [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '738e7097762c42d490a66c3d86af9635', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '41afa63287424a549133615eb390bac7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 1113.739239] env[63345]: DEBUG oslo_vmware.api [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017906, 'name': MoveVirtualDisk_Task} progress is 66%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.752641] env[63345]: DEBUG oslo_vmware.api [None req-1adf44aa-d94b-4bcc-8996-15e32d1aa635 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017908, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.818815] env[63345]: DEBUG nova.network.neutron [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 735c5f4f-98c1-4c75-bb82-66e49b0233f6] Successfully created port: 5e13d081-150b-4a13-a4c9-54ea78065ffe {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1113.840144] env[63345]: DEBUG nova.compute.manager [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 735c5f4f-98c1-4c75-bb82-66e49b0233f6] Start building block device mappings for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 1114.067894] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1114.068980] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1114.111390] env[63345]: DEBUG nova.network.neutron [None req-73a22794-c26a-4108-8cee-c364f40044da tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] Updating instance_info_cache with network_info: [{"id": "f2837ec1-0df3-454a-bc68-fb0ca9562eb4", "address": "fa:16:3e:85:d4:0e", "network": {"id": "04c13a40-3e24-45e3-b045-adb1f5b0ad03", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1754460710-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a7aaf150ea243b6a38a4b14f265bd4d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3753f451-fa23-4988-9361-074fb0bd3fd4", "external-id": "nsx-vlan-transportzone-440", "segmentation_id": 440, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf2837ec1-0d", "ovs_interfaceid": "f2837ec1-0df3-454a-bc68-fb0ca9562eb4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1114.239908] env[63345]: DEBUG oslo_vmware.api [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017906, 'name': MoveVirtualDisk_Task} progress is 88%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.253914] env[63345]: DEBUG oslo_vmware.api [None req-1adf44aa-d94b-4bcc-8996-15e32d1aa635 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017908, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.577419] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1114.577556] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Starting heal instance info cache {{(pid=63345) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10257}} [ 1114.577667] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Rebuilding the list of instances to heal {{(pid=63345) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10261}} [ 1114.613842] env[63345]: DEBUG oslo_concurrency.lockutils [None req-73a22794-c26a-4108-8cee-c364f40044da tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Releasing lock "refresh_cache-148c961e-d260-4dbd-ad9f-52f94b072096" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1114.614573] env[63345]: DEBUG nova.objects.instance [None req-73a22794-c26a-4108-8cee-c364f40044da tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Lazy-loading 'flavor' on Instance uuid 148c961e-d260-4dbd-ad9f-52f94b072096 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1114.740412] env[63345]: DEBUG oslo_vmware.api [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017906, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.359682} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1114.740723] env[63345]: INFO nova.virt.vmwareapi.ds_util [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_bbfca8e0-768b-4f0e-aca9-6ba33d5487c8/OSTACK_IMG_bbfca8e0-768b-4f0e-aca9-6ba33d5487c8.vmdk to [datastore2] devstack-image-cache_base/00f54121-1c47-489a-9345-a57300eace29/00f54121-1c47-489a-9345-a57300eace29.vmdk. 
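Editor's note: the MoveVirtualDisk_Task entries above show the usual oslo.vmware pattern — submit a task, then poll it at a fixed interval until it reports success (the repeated "progress is N%" DEBUG lines, followed by "completed successfully" with a duration_secs value). The sketch below is a minimal, generic version of that loop, not the actual oslo.vmware implementation; the poll_progress callable, its (state, percent) return value, and the timeout handling are illustrative assumptions.

```python
import time


class TaskTimeout(Exception):
    """Illustrative timeout error; not an oslo.vmware exception class."""


def wait_for_task(poll_progress, interval=0.5, timeout=300.0):
    """Poll a task until completion, mirroring the repeated
    '_poll_task ... progress is N%' lines in the log above.

    ``poll_progress`` is a caller-supplied callable returning a
    ``(state, percent)`` tuple -- a stand-in for the per-poll
    PropertyCollector round trips visible in the log.
    """
    started = time.monotonic()
    while time.monotonic() - started < timeout:
        state, percent = poll_progress()
        print(f"Task progress is {percent}%.")   # analogous to the DEBUG poll lines
        if state == "success":
            duration = time.monotonic() - started
            print(f"Task completed successfully (duration_secs={duration:.6f}).")
            return
        if state == "error":
            raise RuntimeError("task reported an error state")
        time.sleep(interval)                     # wait before the next poll round trip
    raise TaskTimeout("task did not complete within the timeout")
```

In practice the caller would pass a closure that re-reads the task's state and progress from the server on every call, which is what produces one poll line per interval in the log.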
[ 1114.740958] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Cleaning up location [datastore2] OSTACK_IMG_bbfca8e0-768b-4f0e-aca9-6ba33d5487c8 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1114.741163] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_bbfca8e0-768b-4f0e-aca9-6ba33d5487c8 {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1114.741435] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8884767e-c4d6-4df4-8d1f-fd703be23e37 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.748944] env[63345]: DEBUG oslo_vmware.api [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Waiting for the task: (returnval){ [ 1114.748944] env[63345]: value = "task-1017909" [ 1114.748944] env[63345]: _type = "Task" [ 1114.748944] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.755594] env[63345]: DEBUG oslo_vmware.api [None req-1adf44aa-d94b-4bcc-8996-15e32d1aa635 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Task: {'id': task-1017908, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.589959} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1114.756155] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-1adf44aa-d94b-4bcc-8996-15e32d1aa635 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1114.756369] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-1adf44aa-d94b-4bcc-8996-15e32d1aa635 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1114.756556] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-1adf44aa-d94b-4bcc-8996-15e32d1aa635 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1114.756735] env[63345]: INFO nova.compute.manager [None req-1adf44aa-d94b-4bcc-8996-15e32d1aa635 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Took 2.68 seconds to destroy the instance on the hypervisor. 
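Editor's note: the image-cache flow for image 00f54121-1c47-489a-9345-a57300eace29 is now complete — download the stream-optimized image into a temporary OSTACK_IMG_* location as an imported VM, unregister it, create devstack-image-cache_base/<image-id>, move the VMDK into the cache, and delete the temporary folder. The sketch below is a local-filesystem analogue of that pattern, with hypothetical helper names; Nova's real _fetch_image_if_missing works against the vCenter datastore, not local paths.

```python
import os
import shutil
import uuid


def fetch_image_if_missing(cache_root, image_id, download):
    """Populate a per-image cache entry the way the log above does on the
    datastore: download into a temporary OSTACK_IMG_* location, move the
    result into the image cache, then clean up the temporary location.

    ``download`` is a hypothetical callable that writes the image file into
    the directory it is given and returns the resulting path; all paths here
    are a filesystem analogue of the [datastore2] paths in the log.
    """
    cached = os.path.join(cache_root, image_id, f"{image_id}.vmdk")
    if os.path.exists(cached):
        return cached                               # cache hit: nothing to fetch

    tmp_dir = os.path.join(cache_root, f"OSTACK_IMG_{uuid.uuid4()}")
    os.makedirs(tmp_dir)
    try:
        tmp_file = download(tmp_dir)                # "Downloading stream optimized image ..."
        os.makedirs(os.path.dirname(cached), exist_ok=True)   # "Creating directory with path ..."
        shutil.move(tmp_file, cached)               # "Moving virtual disk ... to devstack-image-cache_base"
        return cached
    finally:
        shutil.rmtree(tmp_dir, ignore_errors=True)  # "Deleting the datastore file ... OSTACK_IMG_..."
```

Concurrent builds of the same image would additionally need the kind of locking that appears elsewhere in this log (the lockutils acquire/release lines), so that only one request populates a given cache entry at a time.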
[ 1114.756972] env[63345]: DEBUG oslo.service.loopingcall [None req-1adf44aa-d94b-4bcc-8996-15e32d1aa635 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1114.757515] env[63345]: DEBUG nova.compute.manager [-] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 1114.757617] env[63345]: DEBUG nova.network.neutron [-] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1114.761809] env[63345]: DEBUG oslo_vmware.api [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017909, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.852340] env[63345]: DEBUG nova.compute.manager [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 735c5f4f-98c1-4c75-bb82-66e49b0233f6] Start spawning the instance on the hypervisor. {{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 1114.877889] env[63345]: DEBUG nova.virt.hardware [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1114.878164] env[63345]: DEBUG nova.virt.hardware [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1114.878334] env[63345]: DEBUG nova.virt.hardware [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1114.878523] env[63345]: DEBUG nova.virt.hardware [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Flavor pref 
0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1114.878677] env[63345]: DEBUG nova.virt.hardware [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1114.878828] env[63345]: DEBUG nova.virt.hardware [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1114.879081] env[63345]: DEBUG nova.virt.hardware [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1114.879220] env[63345]: DEBUG nova.virt.hardware [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1114.879395] env[63345]: DEBUG nova.virt.hardware [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1114.879565] env[63345]: DEBUG nova.virt.hardware [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1114.879744] env[63345]: DEBUG nova.virt.hardware [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1114.880626] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cbaaa8a-837b-42f4-b3da-a4bbf98f714c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.890616] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7255e332-143d-4066-aea0-5681e5692822 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.008360] env[63345]: DEBUG nova.compute.manager [req-21ab8939-f92d-4183-9f9e-a22ff4dd9614 req-a12aac80-889d-470a-ac21-8b43c420bf19 service nova] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Received event network-vif-deleted-5709e20d-8dfc-41ae-981d-01de437144e3 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1115.008492] env[63345]: INFO nova.compute.manager [req-21ab8939-f92d-4183-9f9e-a22ff4dd9614 
req-a12aac80-889d-470a-ac21-8b43c420bf19 service nova] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Neutron deleted interface 5709e20d-8dfc-41ae-981d-01de437144e3; detaching it from the instance and deleting it from the info cache [ 1115.008671] env[63345]: DEBUG nova.network.neutron [req-21ab8939-f92d-4183-9f9e-a22ff4dd9614 req-a12aac80-889d-470a-ac21-8b43c420bf19 service nova] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1115.081322] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 735c5f4f-98c1-4c75-bb82-66e49b0233f6] Skipping network cache update for instance because it is Building. {{(pid=63345) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10270}} [ 1115.109076] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Acquiring lock "refresh_cache-22a11cf9-8f85-4371-98eb-25b267c9aff7" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1115.109241] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Acquired lock "refresh_cache-22a11cf9-8f85-4371-98eb-25b267c9aff7" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1115.109391] env[63345]: DEBUG nova.network.neutron [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Forcefully refreshing network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2004}} [ 1115.109544] env[63345]: DEBUG nova.objects.instance [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Lazy-loading 'info_cache' on Instance uuid 22a11cf9-8f85-4371-98eb-25b267c9aff7 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1115.119579] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6065cfb8-8b34-43d4-8ee6-4077d7846ace {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.142765] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-73a22794-c26a-4108-8cee-c364f40044da tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1115.143107] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2255112f-b126-4325-a067-3f82312e87c9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.150328] env[63345]: DEBUG oslo_vmware.api [None req-73a22794-c26a-4108-8cee-c364f40044da tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Waiting for the task: (returnval){ [ 1115.150328] env[63345]: value = "task-1017910" [ 1115.150328] env[63345]: _type = "Task" [ 1115.150328] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1115.158946] env[63345]: DEBUG oslo_vmware.api [None req-73a22794-c26a-4108-8cee-c364f40044da tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017910, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.260314] env[63345]: DEBUG oslo_vmware.api [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017909, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.037113} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.260545] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1115.260720] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Releasing lock "[datastore2] devstack-image-cache_base/00f54121-1c47-489a-9345-a57300eace29/00f54121-1c47-489a-9345-a57300eace29.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1115.261032] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/00f54121-1c47-489a-9345-a57300eace29/00f54121-1c47-489a-9345-a57300eace29.vmdk to [datastore2] 95738bee-d291-4f27-aeff-9445939bb3fa/95738bee-d291-4f27-aeff-9445939bb3fa.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1115.261308] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-eacebbcf-b151-4e1a-a1e1-962afe9d01e1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.269489] env[63345]: DEBUG oslo_vmware.api [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Waiting for the task: (returnval){ [ 1115.269489] env[63345]: value = "task-1017911" [ 1115.269489] env[63345]: _type = "Task" [ 1115.269489] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1115.278520] env[63345]: DEBUG oslo_vmware.api [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017911, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.393411] env[63345]: DEBUG nova.compute.manager [req-0f100215-7abb-492a-af91-9e46ad4572ac req-871adbfd-29a7-47a2-b9f3-21e41dd712c6 service nova] [instance: 735c5f4f-98c1-4c75-bb82-66e49b0233f6] Received event network-vif-plugged-5e13d081-150b-4a13-a4c9-54ea78065ffe {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1115.393738] env[63345]: DEBUG oslo_concurrency.lockutils [req-0f100215-7abb-492a-af91-9e46ad4572ac req-871adbfd-29a7-47a2-b9f3-21e41dd712c6 service nova] Acquiring lock "735c5f4f-98c1-4c75-bb82-66e49b0233f6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1115.393981] env[63345]: DEBUG oslo_concurrency.lockutils [req-0f100215-7abb-492a-af91-9e46ad4572ac req-871adbfd-29a7-47a2-b9f3-21e41dd712c6 service nova] Lock "735c5f4f-98c1-4c75-bb82-66e49b0233f6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1115.394227] env[63345]: DEBUG oslo_concurrency.lockutils [req-0f100215-7abb-492a-af91-9e46ad4572ac req-871adbfd-29a7-47a2-b9f3-21e41dd712c6 service nova] Lock "735c5f4f-98c1-4c75-bb82-66e49b0233f6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1115.394444] env[63345]: DEBUG nova.compute.manager [req-0f100215-7abb-492a-af91-9e46ad4572ac req-871adbfd-29a7-47a2-b9f3-21e41dd712c6 service nova] [instance: 735c5f4f-98c1-4c75-bb82-66e49b0233f6] No waiting events found dispatching network-vif-plugged-5e13d081-150b-4a13-a4c9-54ea78065ffe {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1115.394850] env[63345]: WARNING nova.compute.manager [req-0f100215-7abb-492a-af91-9e46ad4572ac req-871adbfd-29a7-47a2-b9f3-21e41dd712c6 service nova] [instance: 735c5f4f-98c1-4c75-bb82-66e49b0233f6] Received unexpected event network-vif-plugged-5e13d081-150b-4a13-a4c9-54ea78065ffe for instance with vm_state building and task_state spawning. 
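The event records above (network-vif-plugged received, the per-instance "-events" lock acquired and released, then the unexpected-event warning) show external Neutron events being serialized per instance: the handler takes a "<uuid>-events" lock, pops any waiter registered for that event, and warns when nothing was waiting. The sketch below only mirrors that acquire/pop/warn flow; the waiter registry and function names are assumptions for illustration rather than Nova's actual implementation, and it uses oslo_concurrency.lockutils.lock() as the context-manager form of the locking primitive that appears in these records.

    from collections import defaultdict

    from oslo_concurrency import lockutils

    # Hypothetical waiter registry: instance uuid -> {event name: callback}.
    # Nova's ComputeManager keeps richer state; this only mirrors the flow.
    _waiters = defaultdict(dict)


    def pop_instance_event(instance_uuid, event_name):
        """Pop a waiter while holding the per-instance '-events' lock."""
        with lockutils.lock(f"{instance_uuid}-events"):
            return _waiters[instance_uuid].pop(event_name, None)


    def external_instance_event(instance_uuid, event_name):
        callback = pop_instance_event(instance_uuid, event_name)
        if callback is None:
            # No one registered for this event: the equivalent of the
            # "Received unexpected event ..." WARNING in the log above.
            print(f"unexpected event {event_name} for instance {instance_uuid}")
        else:
            callback(event_name)
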
[ 1115.487913] env[63345]: DEBUG nova.network.neutron [-] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1115.511327] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-efa2a4bc-feda-40ba-9535-075702eeeb74 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.514258] env[63345]: DEBUG nova.network.neutron [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 735c5f4f-98c1-4c75-bb82-66e49b0233f6] Successfully updated port: 5e13d081-150b-4a13-a4c9-54ea78065ffe {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1115.526639] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66a5bbeb-7c00-4ae6-a6e2-da64ec2c7875 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.563630] env[63345]: DEBUG nova.compute.manager [req-21ab8939-f92d-4183-9f9e-a22ff4dd9614 req-a12aac80-889d-470a-ac21-8b43c420bf19 service nova] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Detach interface failed, port_id=5709e20d-8dfc-41ae-981d-01de437144e3, reason: Instance 7245e83c-2dda-4b2f-8a65-07f7e4d6828a could not be found. {{(pid=63345) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 1115.664866] env[63345]: DEBUG oslo_vmware.api [None req-73a22794-c26a-4108-8cee-c364f40044da tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017910, 'name': PowerOffVM_Task, 'duration_secs': 0.376724} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.664996] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-73a22794-c26a-4108-8cee-c364f40044da tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1115.670425] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-73a22794-c26a-4108-8cee-c364f40044da tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] Reconfiguring VM instance instance-0000006f to detach disk 2001 {{(pid=63345) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1115.671064] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c8f27753-3c28-4508-8237-584eb8d39bf7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.691855] env[63345]: DEBUG oslo_vmware.api [None req-73a22794-c26a-4108-8cee-c364f40044da tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Waiting for the task: (returnval){ [ 1115.691855] env[63345]: value = "task-1017912" [ 1115.691855] env[63345]: _type = "Task" [ 1115.691855] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1115.702293] env[63345]: DEBUG oslo_vmware.api [None req-73a22794-c26a-4108-8cee-c364f40044da tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017912, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.780887] env[63345]: DEBUG oslo_vmware.api [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017911, 'name': CopyVirtualDisk_Task} progress is 12%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.991182] env[63345]: INFO nova.compute.manager [-] [instance: 7245e83c-2dda-4b2f-8a65-07f7e4d6828a] Took 1.23 seconds to deallocate network for instance. [ 1116.019991] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Acquiring lock "refresh_cache-735c5f4f-98c1-4c75-bb82-66e49b0233f6" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1116.019991] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Acquired lock "refresh_cache-735c5f4f-98c1-4c75-bb82-66e49b0233f6" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1116.019991] env[63345]: DEBUG nova.network.neutron [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 735c5f4f-98c1-4c75-bb82-66e49b0233f6] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1116.202337] env[63345]: DEBUG oslo_vmware.api [None req-73a22794-c26a-4108-8cee-c364f40044da tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017912, 'name': ReconfigVM_Task, 'duration_secs': 0.446248} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.202633] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-73a22794-c26a-4108-8cee-c364f40044da tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] Reconfigured VM instance instance-0000006f to detach disk 2001 {{(pid=63345) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1116.202831] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-73a22794-c26a-4108-8cee-c364f40044da tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1116.203120] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6fad5465-c997-425a-8a54-5940bcb1f98a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.211274] env[63345]: DEBUG oslo_vmware.api [None req-73a22794-c26a-4108-8cee-c364f40044da tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Waiting for the task: (returnval){ [ 1116.211274] env[63345]: value = "task-1017913" [ 1116.211274] env[63345]: _type = "Task" [ 1116.211274] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.220601] env[63345]: DEBUG oslo_vmware.api [None req-73a22794-c26a-4108-8cee-c364f40044da tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017913, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.281691] env[63345]: DEBUG oslo_vmware.api [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017911, 'name': CopyVirtualDisk_Task} progress is 32%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.499588] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1adf44aa-d94b-4bcc-8996-15e32d1aa635 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1116.499874] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1adf44aa-d94b-4bcc-8996-15e32d1aa635 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1116.500138] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1adf44aa-d94b-4bcc-8996-15e32d1aa635 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1116.524141] env[63345]: INFO nova.scheduler.client.report [None req-1adf44aa-d94b-4bcc-8996-15e32d1aa635 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Deleted allocations for instance 7245e83c-2dda-4b2f-8a65-07f7e4d6828a [ 1116.560454] env[63345]: DEBUG nova.network.neutron [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 735c5f4f-98c1-4c75-bb82-66e49b0233f6] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1116.723545] env[63345]: DEBUG oslo_vmware.api [None req-73a22794-c26a-4108-8cee-c364f40044da tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017913, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.780054] env[63345]: DEBUG nova.network.neutron [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 735c5f4f-98c1-4c75-bb82-66e49b0233f6] Updating instance_info_cache with network_info: [{"id": "5e13d081-150b-4a13-a4c9-54ea78065ffe", "address": "fa:16:3e:55:b1:df", "network": {"id": "372a3368-2d7a-4380-b811-7ad477d85250", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-454648225-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "41afa63287424a549133615eb390bac7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b8af79a-31d5-4d78-93d7-3919aa1d9186", "external-id": "nsx-vlan-transportzone-324", "segmentation_id": 324, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5e13d081-15", "ovs_interfaceid": "5e13d081-150b-4a13-a4c9-54ea78065ffe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1116.791163] env[63345]: DEBUG oslo_vmware.api [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017911, 'name': CopyVirtualDisk_Task} progress is 52%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.905388] env[63345]: DEBUG nova.network.neutron [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Updating instance_info_cache with network_info: [{"id": "267ad158-547a-4d3a-a838-3d964626d731", "address": "fa:16:3e:9e:ba:8d", "network": {"id": "13df4553-212e-4adb-8de0-da1acdf99671", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-238696814-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.153", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4560e378b6aa47a3bbb5a2f7c5b76f5f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "90328c7b-15c4-4742-805b-755248d67029", "external-id": "nsx-vlan-transportzone-860", "segmentation_id": 860, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap267ad158-54", "ovs_interfaceid": "267ad158-547a-4d3a-a838-3d964626d731", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1117.036196] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1adf44aa-d94b-4bcc-8996-15e32d1aa635 tempest-DeleteServersTestJSON-208475450 tempest-DeleteServersTestJSON-208475450-project-member] Lock "7245e83c-2dda-4b2f-8a65-07f7e4d6828a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.463s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1117.225629] env[63345]: DEBUG oslo_vmware.api [None req-73a22794-c26a-4108-8cee-c364f40044da tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017913, 'name': PowerOnVM_Task, 'duration_secs': 0.776663} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1117.225793] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-73a22794-c26a-4108-8cee-c364f40044da tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1117.226147] env[63345]: DEBUG nova.compute.manager [None req-73a22794-c26a-4108-8cee-c364f40044da tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1117.227054] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9b1babe-d793-4579-9802-ab200063fd03 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.243215] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f6898dab-1e05-4402-9ae5-c028ff8a9823 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Acquiring lock "22a11cf9-8f85-4371-98eb-25b267c9aff7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1117.243507] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f6898dab-1e05-4402-9ae5-c028ff8a9823 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Lock "22a11cf9-8f85-4371-98eb-25b267c9aff7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1117.243766] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f6898dab-1e05-4402-9ae5-c028ff8a9823 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Acquiring lock "22a11cf9-8f85-4371-98eb-25b267c9aff7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1117.243959] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f6898dab-1e05-4402-9ae5-c028ff8a9823 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Lock "22a11cf9-8f85-4371-98eb-25b267c9aff7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1117.244165] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f6898dab-1e05-4402-9ae5-c028ff8a9823 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Lock "22a11cf9-8f85-4371-98eb-25b267c9aff7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1117.246779] env[63345]: INFO nova.compute.manager [None req-f6898dab-1e05-4402-9ae5-c028ff8a9823 tempest-AttachVolumeTestJSON-569378185 
tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Terminating instance [ 1117.283417] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Releasing lock "refresh_cache-735c5f4f-98c1-4c75-bb82-66e49b0233f6" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1117.283860] env[63345]: DEBUG nova.compute.manager [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 735c5f4f-98c1-4c75-bb82-66e49b0233f6] Instance network_info: |[{"id": "5e13d081-150b-4a13-a4c9-54ea78065ffe", "address": "fa:16:3e:55:b1:df", "network": {"id": "372a3368-2d7a-4380-b811-7ad477d85250", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-454648225-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "41afa63287424a549133615eb390bac7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b8af79a-31d5-4d78-93d7-3919aa1d9186", "external-id": "nsx-vlan-transportzone-324", "segmentation_id": 324, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5e13d081-15", "ovs_interfaceid": "5e13d081-150b-4a13-a4c9-54ea78065ffe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 1117.287593] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 735c5f4f-98c1-4c75-bb82-66e49b0233f6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:55:b1:df', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5b8af79a-31d5-4d78-93d7-3919aa1d9186', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5e13d081-150b-4a13-a4c9-54ea78065ffe', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1117.295997] env[63345]: DEBUG oslo.service.loopingcall [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1117.296753] env[63345]: DEBUG oslo_vmware.api [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017911, 'name': CopyVirtualDisk_Task} progress is 71%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.297062] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 735c5f4f-98c1-4c75-bb82-66e49b0233f6] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1117.297302] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b55fe4b8-c8d6-40d3-b41c-34bf8f786dab {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.321727] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1117.321727] env[63345]: value = "task-1017915" [ 1117.321727] env[63345]: _type = "Task" [ 1117.321727] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.333160] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017915, 'name': CreateVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.409257] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Releasing lock "refresh_cache-22a11cf9-8f85-4371-98eb-25b267c9aff7" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1117.409535] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Updated the network info_cache for instance {{(pid=63345) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10328}} [ 1117.409773] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1117.409974] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1117.410210] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1117.410404] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1117.410594] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1117.410789] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._sync_power_states {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1117.425543] env[63345]: DEBUG nova.compute.manager 
[req-0382c80f-975b-4f18-a3b2-7a580590504c req-2cdab770-543f-4e3b-aeef-91a13d58a6cc service nova] [instance: 735c5f4f-98c1-4c75-bb82-66e49b0233f6] Received event network-changed-5e13d081-150b-4a13-a4c9-54ea78065ffe {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1117.425543] env[63345]: DEBUG nova.compute.manager [req-0382c80f-975b-4f18-a3b2-7a580590504c req-2cdab770-543f-4e3b-aeef-91a13d58a6cc service nova] [instance: 735c5f4f-98c1-4c75-bb82-66e49b0233f6] Refreshing instance network info cache due to event network-changed-5e13d081-150b-4a13-a4c9-54ea78065ffe. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 1117.425818] env[63345]: DEBUG oslo_concurrency.lockutils [req-0382c80f-975b-4f18-a3b2-7a580590504c req-2cdab770-543f-4e3b-aeef-91a13d58a6cc service nova] Acquiring lock "refresh_cache-735c5f4f-98c1-4c75-bb82-66e49b0233f6" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1117.425818] env[63345]: DEBUG oslo_concurrency.lockutils [req-0382c80f-975b-4f18-a3b2-7a580590504c req-2cdab770-543f-4e3b-aeef-91a13d58a6cc service nova] Acquired lock "refresh_cache-735c5f4f-98c1-4c75-bb82-66e49b0233f6" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1117.425936] env[63345]: DEBUG nova.network.neutron [req-0382c80f-975b-4f18-a3b2-7a580590504c req-2cdab770-543f-4e3b-aeef-91a13d58a6cc service nova] [instance: 735c5f4f-98c1-4c75-bb82-66e49b0233f6] Refreshing network info cache for port 5e13d081-150b-4a13-a4c9-54ea78065ffe {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1117.752025] env[63345]: DEBUG nova.compute.manager [None req-f6898dab-1e05-4402-9ae5-c028ff8a9823 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Start destroying the instance on the hypervisor. 
{{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 1117.752466] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-f6898dab-1e05-4402-9ae5-c028ff8a9823 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1117.753929] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82dbc812-0a9f-4c6a-affd-84356d928a3d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.766632] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6898dab-1e05-4402-9ae5-c028ff8a9823 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1117.767938] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2b9b85e2-3927-46eb-8ad5-a38b6ba5bc13 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.778645] env[63345]: DEBUG oslo_vmware.api [None req-f6898dab-1e05-4402-9ae5-c028ff8a9823 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Waiting for the task: (returnval){ [ 1117.778645] env[63345]: value = "task-1017916" [ 1117.778645] env[63345]: _type = "Task" [ 1117.778645] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.793874] env[63345]: DEBUG oslo_vmware.api [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017911, 'name': CopyVirtualDisk_Task} progress is 91%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.799262] env[63345]: DEBUG oslo_vmware.api [None req-f6898dab-1e05-4402-9ae5-c028ff8a9823 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': task-1017916, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.837076] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017915, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.916234] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Getting list of instances from cluster (obj){ [ 1117.916234] env[63345]: value = "domain-c8" [ 1117.916234] env[63345]: _type = "ClusterComputeResource" [ 1117.916234] env[63345]: } {{(pid=63345) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 1117.918361] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a7e08a5-9cde-4a19-8030-cc1fdadc02d4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.948054] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Got total of 5 instances {{(pid=63345) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 1117.948054] env[63345]: WARNING nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] While synchronizing instance power states, found 6 instances in the database and 5 instances on the hypervisor. [ 1117.948283] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Triggering sync for uuid 22a11cf9-8f85-4371-98eb-25b267c9aff7 {{(pid=63345) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10667}} [ 1117.948564] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Triggering sync for uuid 95738bee-d291-4f27-aeff-9445939bb3fa {{(pid=63345) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10667}} [ 1117.948798] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Triggering sync for uuid 148c961e-d260-4dbd-ad9f-52f94b072096 {{(pid=63345) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10667}} [ 1117.949043] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Triggering sync for uuid 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8 {{(pid=63345) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10667}} [ 1117.949274] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Triggering sync for uuid 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46 {{(pid=63345) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10667}} [ 1117.949604] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Triggering sync for uuid 735c5f4f-98c1-4c75-bb82-66e49b0233f6 {{(pid=63345) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10667}} [ 1117.950145] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Acquiring lock "22a11cf9-8f85-4371-98eb-25b267c9aff7" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1117.950599] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Acquiring lock "95738bee-d291-4f27-aeff-9445939bb3fa" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1117.950974] env[63345]: DEBUG oslo_concurrency.lockutils 
[None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Acquiring lock "148c961e-d260-4dbd-ad9f-52f94b072096" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1117.951314] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Lock "148c961e-d260-4dbd-ad9f-52f94b072096" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1117.951780] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Acquiring lock "83ef21e9-62eb-4f0d-9c0c-a038743e0dd8" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1117.952104] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Lock "83ef21e9-62eb-4f0d-9c0c-a038743e0dd8" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1117.952502] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Acquiring lock "5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1117.952815] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Lock "5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1117.953223] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Acquiring lock "735c5f4f-98c1-4c75-bb82-66e49b0233f6" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1117.953511] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1117.953722] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63345) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10876}} [ 1117.954992] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d9949f5-6ce2-4d13-8e29-51700fa5ad30 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.960055] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdb77c87-6b35-4d23-9d1c-8797f9cee915 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.963978] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f35d0ad8-6027-4874-9542-b0c8ab2e0291 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.967555] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager.update_available_resource {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1118.247303] env[63345]: DEBUG nova.network.neutron [req-0382c80f-975b-4f18-a3b2-7a580590504c req-2cdab770-543f-4e3b-aeef-91a13d58a6cc service nova] [instance: 735c5f4f-98c1-4c75-bb82-66e49b0233f6] Updated VIF entry in instance network info cache for port 5e13d081-150b-4a13-a4c9-54ea78065ffe. {{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1118.247664] env[63345]: DEBUG nova.network.neutron [req-0382c80f-975b-4f18-a3b2-7a580590504c req-2cdab770-543f-4e3b-aeef-91a13d58a6cc service nova] [instance: 735c5f4f-98c1-4c75-bb82-66e49b0233f6] Updating instance_info_cache with network_info: [{"id": "5e13d081-150b-4a13-a4c9-54ea78065ffe", "address": "fa:16:3e:55:b1:df", "network": {"id": "372a3368-2d7a-4380-b811-7ad477d85250", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-454648225-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "41afa63287424a549133615eb390bac7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b8af79a-31d5-4d78-93d7-3919aa1d9186", "external-id": "nsx-vlan-transportzone-324", "segmentation_id": 324, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5e13d081-15", "ovs_interfaceid": "5e13d081-150b-4a13-a4c9-54ea78065ffe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1118.291595] env[63345]: DEBUG oslo_vmware.api [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017911, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.784702} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.294640] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/00f54121-1c47-489a-9345-a57300eace29/00f54121-1c47-489a-9345-a57300eace29.vmdk to [datastore2] 95738bee-d291-4f27-aeff-9445939bb3fa/95738bee-d291-4f27-aeff-9445939bb3fa.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1118.295012] env[63345]: DEBUG oslo_vmware.api [None req-f6898dab-1e05-4402-9ae5-c028ff8a9823 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': task-1017916, 'name': PowerOffVM_Task, 'duration_secs': 0.370557} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.295702] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f3d66c8-ac70-4a27-9374-af360baa6a07 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.298206] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6898dab-1e05-4402-9ae5-c028ff8a9823 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1118.298442] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-f6898dab-1e05-4402-9ae5-c028ff8a9823 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1118.298699] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-709842ab-fbed-4ee9-b753-c254e884fbfd {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.324275] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Reconfiguring VM instance instance-00000067 to attach disk [datastore2] 95738bee-d291-4f27-aeff-9445939bb3fa/95738bee-d291-4f27-aeff-9445939bb3fa.vmdk or device None with type streamOptimized {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1118.325054] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-52df39b0-7cef-4cd8-8480-daff521d82b0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.348627] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017915, 'name': CreateVM_Task, 'duration_secs': 0.818706} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.349940] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 735c5f4f-98c1-4c75-bb82-66e49b0233f6] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1118.350329] env[63345]: DEBUG oslo_vmware.api [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Waiting for the task: (returnval){ [ 1118.350329] env[63345]: value = "task-1017918" [ 1118.350329] env[63345]: _type = "Task" [ 1118.350329] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.350979] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1118.351168] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1118.351503] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1118.351840] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ea6cf804-e941-4322-b83a-7ebf343803bc {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.360859] env[63345]: DEBUG oslo_vmware.api [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Waiting for the task: (returnval){ [ 1118.360859] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52cc0a08-ede6-b797-8017-67fbec3b3e42" [ 1118.360859] env[63345]: _type = "Task" [ 1118.360859] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.364182] env[63345]: DEBUG oslo_vmware.api [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017918, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.372916] env[63345]: DEBUG oslo_vmware.api [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52cc0a08-ede6-b797-8017-67fbec3b3e42, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.391696] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-f6898dab-1e05-4402-9ae5-c028ff8a9823 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1118.392171] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-f6898dab-1e05-4402-9ae5-c028ff8a9823 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1118.392481] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6898dab-1e05-4402-9ae5-c028ff8a9823 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Deleting the datastore file [datastore2] 22a11cf9-8f85-4371-98eb-25b267c9aff7 {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1118.392798] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3b436c21-ba78-4093-8893-9e4da44a4f0c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.400413] env[63345]: DEBUG oslo_vmware.api [None req-f6898dab-1e05-4402-9ae5-c028ff8a9823 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Waiting for the task: (returnval){ [ 1118.400413] env[63345]: value = "task-1017919" [ 1118.400413] env[63345]: _type = "Task" [ 1118.400413] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.409883] env[63345]: DEBUG oslo_vmware.api [None req-f6898dab-1e05-4402-9ae5-c028ff8a9823 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': task-1017919, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.472790] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1118.473059] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1118.473258] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1118.473420] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63345) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1118.474509] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1409e29c-7da4-4dad-8878-14d3de6d298c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.484950] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Lock "83ef21e9-62eb-4f0d-9c0c-a038743e0dd8" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.533s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1118.486244] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee7cc054-199c-4c2c-b4d7-f45fddae2b59 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.490426] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Lock "148c961e-d260-4dbd-ad9f-52f94b072096" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.539s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1118.490778] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Lock "5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.538s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1118.501533] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a1be4bd-aa6f-4311-b076-b222b5103849 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.508875] env[63345]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c62ded75-01d2-41ac-aed1-e5cd5b8032ee {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.541098] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180392MB free_disk=186GB free_vcpus=48 pci_devices=None {{(pid=63345) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1118.541297] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1118.541482] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1118.751705] env[63345]: DEBUG oslo_concurrency.lockutils [req-0382c80f-975b-4f18-a3b2-7a580590504c req-2cdab770-543f-4e3b-aeef-91a13d58a6cc service nova] Releasing lock "refresh_cache-735c5f4f-98c1-4c75-bb82-66e49b0233f6" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1118.862207] env[63345]: DEBUG oslo_vmware.api [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017918, 'name': ReconfigVM_Task, 'duration_secs': 0.318411} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.862828] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Reconfigured VM instance instance-00000067 to attach disk [datastore2] 95738bee-d291-4f27-aeff-9445939bb3fa/95738bee-d291-4f27-aeff-9445939bb3fa.vmdk or device None with type streamOptimized {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1118.864016] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-44f06235-0335-4385-89e4-9f86669392f8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.873881] env[63345]: DEBUG oslo_vmware.api [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52cc0a08-ede6-b797-8017-67fbec3b3e42, 'name': SearchDatastore_Task, 'duration_secs': 0.042386} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.875169] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1118.875420] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 735c5f4f-98c1-4c75-bb82-66e49b0233f6] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1118.875659] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1118.875815] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1118.876035] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1118.876395] env[63345]: DEBUG oslo_vmware.api [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Waiting for the task: (returnval){ [ 1118.876395] env[63345]: value = "task-1017920" [ 1118.876395] env[63345]: _type = "Task" [ 1118.876395] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.876604] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-197cb8c3-89df-4374-a7b0-a85b31b1b13e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.887801] env[63345]: DEBUG oslo_vmware.api [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017920, 'name': Rename_Task} progress is 5%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.888908] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1118.889116] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1118.889812] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e31c249-9904-4700-b3ab-cae51a0b6ede {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.894819] env[63345]: DEBUG oslo_vmware.api [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Waiting for the task: (returnval){ [ 1118.894819] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]528308b2-afc2-6d28-5b16-a4b75a26bb9c" [ 1118.894819] env[63345]: _type = "Task" [ 1118.894819] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.903181] env[63345]: DEBUG oslo_vmware.api [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]528308b2-afc2-6d28-5b16-a4b75a26bb9c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.912443] env[63345]: DEBUG oslo_vmware.api [None req-f6898dab-1e05-4402-9ae5-c028ff8a9823 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': task-1017919, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.174301} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.912687] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6898dab-1e05-4402-9ae5-c028ff8a9823 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1118.912875] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-f6898dab-1e05-4402-9ae5-c028ff8a9823 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1118.913077] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-f6898dab-1e05-4402-9ae5-c028ff8a9823 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1118.913266] env[63345]: INFO nova.compute.manager [None req-f6898dab-1e05-4402-9ae5-c028ff8a9823 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1118.913508] env[63345]: DEBUG oslo.service.loopingcall [None req-f6898dab-1e05-4402-9ae5-c028ff8a9823 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1118.913723] env[63345]: DEBUG nova.compute.manager [-] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 1118.913814] env[63345]: DEBUG nova.network.neutron [-] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1119.392167] env[63345]: DEBUG oslo_vmware.api [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017920, 'name': Rename_Task, 'duration_secs': 0.149922} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1119.392562] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1119.392885] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-61ebac71-c115-440a-9763-d08805fb6bec {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.403015] env[63345]: DEBUG oslo_vmware.api [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Waiting for the task: (returnval){ [ 1119.403015] env[63345]: value = "task-1017921" [ 1119.403015] env[63345]: _type = "Task" [ 1119.403015] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1119.407873] env[63345]: DEBUG oslo_vmware.api [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]528308b2-afc2-6d28-5b16-a4b75a26bb9c, 'name': SearchDatastore_Task, 'duration_secs': 0.0096} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1119.413021] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f240e66d-2245-4036-a11f-e9bcf5340206 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.422372] env[63345]: DEBUG oslo_vmware.api [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Waiting for the task: (returnval){ [ 1119.422372] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]525c5f69-074c-02e7-4cf6-85c19e4476cb" [ 1119.422372] env[63345]: _type = "Task" [ 1119.422372] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1119.426067] env[63345]: DEBUG oslo_vmware.api [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017921, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.441710] env[63345]: DEBUG oslo_vmware.api [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]525c5f69-074c-02e7-4cf6-85c19e4476cb, 'name': SearchDatastore_Task, 'duration_secs': 0.014352} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1119.442122] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1119.442460] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 735c5f4f-98c1-4c75-bb82-66e49b0233f6/735c5f4f-98c1-4c75-bb82-66e49b0233f6.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1119.442803] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-861ed756-743d-481a-9672-d2dc018cd5e6 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.454154] env[63345]: DEBUG oslo_vmware.api [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Waiting for the task: (returnval){ [ 1119.454154] env[63345]: value = "task-1017922" [ 1119.454154] env[63345]: _type = "Task" [ 1119.454154] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1119.464746] env[63345]: DEBUG nova.compute.manager [req-5f590073-9a44-415f-86c9-af0d8bfe72a5 req-cce286c4-a42f-4fdd-930b-02cc7bf26fa9 service nova] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Received event network-vif-deleted-267ad158-547a-4d3a-a838-3d964626d731 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1119.465260] env[63345]: INFO nova.compute.manager [req-5f590073-9a44-415f-86c9-af0d8bfe72a5 req-cce286c4-a42f-4fdd-930b-02cc7bf26fa9 service nova] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Neutron deleted interface 267ad158-547a-4d3a-a838-3d964626d731; detaching it from the instance and deleting it from the info cache [ 1119.465664] env[63345]: DEBUG nova.network.neutron [req-5f590073-9a44-415f-86c9-af0d8bfe72a5 req-cce286c4-a42f-4fdd-930b-02cc7bf26fa9 service nova] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1119.475856] env[63345]: DEBUG oslo_vmware.api [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017922, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.574021] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 22a11cf9-8f85-4371-98eb-25b267c9aff7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1119.574021] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 148c961e-d260-4dbd-ad9f-52f94b072096 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1119.574021] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1119.574021] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1119.574021] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 95738bee-d291-4f27-aeff-9445939bb3fa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1119.574021] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 735c5f4f-98c1-4c75-bb82-66e49b0233f6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1119.574021] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Total usable vcpus: 48, total allocated vcpus: 6 {{(pid=63345) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1119.574021] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=200GB used_disk=6GB total_vcpus=48 used_vcpus=6 pci_stats=[] {{(pid=63345) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1119.674508] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ea606bf-c3e9-4814-8694-3470eb1f9056 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.685112] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ef595e4-1fcd-4f2a-a1d8-6b22c80fb9c2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.721903] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5649aede-445e-4e1b-b3fb-0c831efd5c41 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.732149] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-668c009f-9756-4a1d-9f9d-37c70510de43 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.751411] env[63345]: DEBUG nova.compute.provider_tree [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1119.886165] env[63345]: DEBUG nova.network.neutron [-] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1119.919237] env[63345]: DEBUG oslo_vmware.api [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017921, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.965354] env[63345]: DEBUG oslo_vmware.api [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017922, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.504204} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1119.965648] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 735c5f4f-98c1-4c75-bb82-66e49b0233f6/735c5f4f-98c1-4c75-bb82-66e49b0233f6.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1119.965877] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 735c5f4f-98c1-4c75-bb82-66e49b0233f6] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1119.966198] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-24a42d7b-fb32-44c6-8740-3cd0a64d63ca {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.970241] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7af3c10f-8678-44ef-89a7-261b3d02a0e8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.974039] env[63345]: DEBUG oslo_vmware.api [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Waiting for the task: (returnval){ [ 1119.974039] env[63345]: value = "task-1017923" [ 1119.974039] env[63345]: _type = "Task" [ 1119.974039] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1119.981718] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54d26d7b-b111-4cf2-b762-75344fb7a19e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.995038] env[63345]: DEBUG oslo_vmware.api [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017923, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.015183] env[63345]: DEBUG nova.compute.manager [req-5f590073-9a44-415f-86c9-af0d8bfe72a5 req-cce286c4-a42f-4fdd-930b-02cc7bf26fa9 service nova] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Detach interface failed, port_id=267ad158-547a-4d3a-a838-3d964626d731, reason: Instance 22a11cf9-8f85-4371-98eb-25b267c9aff7 could not be found. 
{{(pid=63345) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 1120.291794] env[63345]: DEBUG nova.scheduler.client.report [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Updated inventory for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with generation 164 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1120.292043] env[63345]: DEBUG nova.compute.provider_tree [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Updating resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 generation from 164 to 165 during operation: update_inventory {{(pid=63345) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1120.292233] env[63345]: DEBUG nova.compute.provider_tree [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1120.388879] env[63345]: INFO nova.compute.manager [-] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] Took 1.47 seconds to deallocate network for instance. [ 1120.419109] env[63345]: DEBUG oslo_vmware.api [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017921, 'name': PowerOnVM_Task, 'duration_secs': 0.560467} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1120.419416] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1120.484340] env[63345]: DEBUG oslo_vmware.api [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017923, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.142995} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1120.484627] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 735c5f4f-98c1-4c75-bb82-66e49b0233f6] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1120.485450] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4482089a-f5c2-4761-b234-e590003eef29 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.509852] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 735c5f4f-98c1-4c75-bb82-66e49b0233f6] Reconfiguring VM instance instance-00000072 to attach disk [datastore2] 735c5f4f-98c1-4c75-bb82-66e49b0233f6/735c5f4f-98c1-4c75-bb82-66e49b0233f6.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1120.510528] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b30951ed-5b03-4b05-bbaa-e006d0da651d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.525235] env[63345]: DEBUG nova.compute.manager [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1120.525966] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c96350a7-9647-446e-a602-6fec0445dd15 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.537308] env[63345]: DEBUG oslo_vmware.api [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Waiting for the task: (returnval){ [ 1120.537308] env[63345]: value = "task-1017924" [ 1120.537308] env[63345]: _type = "Task" [ 1120.537308] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1120.545669] env[63345]: DEBUG oslo_vmware.api [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017924, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.713577] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Acquiring lock "937ba0d6-bf23-45ae-8d75-cd7559e436f5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1120.713577] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Lock "937ba0d6-bf23-45ae-8d75-cd7559e436f5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1120.797352] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63345) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1120.797527] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.256s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1120.895523] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f6898dab-1e05-4402-9ae5-c028ff8a9823 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1120.895907] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f6898dab-1e05-4402-9ae5-c028ff8a9823 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1120.895907] env[63345]: DEBUG nova.objects.instance [None req-f6898dab-1e05-4402-9ae5-c028ff8a9823 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Lazy-loading 'resources' on Instance uuid 22a11cf9-8f85-4371-98eb-25b267c9aff7 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1121.051284] env[63345]: DEBUG oslo_vmware.api [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017924, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.051811] env[63345]: DEBUG oslo_concurrency.lockutils [None req-a0f6527e-1edc-451c-bcef-af788c9e1b6b tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Lock "95738bee-d291-4f27-aeff-9445939bb3fa" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 21.607s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1121.052691] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Lock "95738bee-d291-4f27-aeff-9445939bb3fa" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 3.102s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1121.052893] env[63345]: INFO nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] During sync_power_state the instance has a pending task (spawning). Skip. [ 1121.053088] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Lock "95738bee-d291-4f27-aeff-9445939bb3fa" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1121.216271] env[63345]: DEBUG nova.compute.manager [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 1121.524246] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8293f0d-32a2-4bea-aac2-183dd58d3f28 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.532901] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54c07479-90d5-4935-80b9-6f703c016aa0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.571841] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5a2bd8d-9dc8-424e-bdd5-0e2211eda1a9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.580470] env[63345]: DEBUG oslo_vmware.api [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017924, 'name': ReconfigVM_Task, 'duration_secs': 1.034917} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.582725] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 735c5f4f-98c1-4c75-bb82-66e49b0233f6] Reconfigured VM instance instance-00000072 to attach disk [datastore2] 735c5f4f-98c1-4c75-bb82-66e49b0233f6/735c5f4f-98c1-4c75-bb82-66e49b0233f6.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1121.583462] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f09e5283-c123-4d5c-b974-b3fbd1febfdf {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.586033] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73e31ac7-1627-4721-b055-f272669b5871 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.602172] env[63345]: DEBUG nova.compute.provider_tree [None req-f6898dab-1e05-4402-9ae5-c028ff8a9823 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1121.604651] env[63345]: DEBUG oslo_vmware.api [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Waiting for the task: (returnval){ [ 1121.604651] env[63345]: value = "task-1017925" [ 1121.604651] env[63345]: _type = "Task" [ 1121.604651] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.615026] env[63345]: DEBUG oslo_vmware.api [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017925, 'name': Rename_Task} progress is 6%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.737976] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1122.118166] env[63345]: DEBUG oslo_vmware.api [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017925, 'name': Rename_Task, 'duration_secs': 0.155212} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.118482] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 735c5f4f-98c1-4c75-bb82-66e49b0233f6] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1122.118721] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e1328c72-58d6-4024-b527-5201c5d6bb78 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.123219] env[63345]: ERROR nova.scheduler.client.report [None req-f6898dab-1e05-4402-9ae5-c028ff8a9823 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [req-07240fbf-f818-4e93-aae1-0eb644f046f3] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID fc35ddde-c15e-4ab8-bf77-a06ae0805b57. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-07240fbf-f818-4e93-aae1-0eb644f046f3"}]} [ 1122.126491] env[63345]: DEBUG oslo_vmware.api [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Waiting for the task: (returnval){ [ 1122.126491] env[63345]: value = "task-1017926" [ 1122.126491] env[63345]: _type = "Task" [ 1122.126491] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.134791] env[63345]: DEBUG oslo_vmware.api [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017926, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.139774] env[63345]: DEBUG nova.scheduler.client.report [None req-f6898dab-1e05-4402-9ae5-c028ff8a9823 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Refreshing inventories for resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1122.156834] env[63345]: DEBUG nova.scheduler.client.report [None req-f6898dab-1e05-4402-9ae5-c028ff8a9823 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Updating ProviderTree inventory for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1122.156834] env[63345]: DEBUG nova.compute.provider_tree [None req-f6898dab-1e05-4402-9ae5-c028ff8a9823 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1122.171293] env[63345]: DEBUG nova.scheduler.client.report [None req-f6898dab-1e05-4402-9ae5-c028ff8a9823 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Refreshing aggregate associations for resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57, aggregates: None {{(pid=63345) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1122.191030] env[63345]: DEBUG nova.scheduler.client.report [None req-f6898dab-1e05-4402-9ae5-c028ff8a9823 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Refreshing trait associations for resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=63345) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1122.288746] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a7fb29c-2415-467a-9f74-f690256cd51f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.297127] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7192655-7cbe-46ba-99f7-e7d9e171ce96 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.328901] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-e31137f5-9644-4e58-b32f-c9c13f6507fa {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.337021] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78e9965f-4272-4a0c-b918-43c47ddff9c7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.350842] env[63345]: DEBUG nova.compute.provider_tree [None req-f6898dab-1e05-4402-9ae5-c028ff8a9823 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1122.639308] env[63345]: DEBUG oslo_vmware.api [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017926, 'name': PowerOnVM_Task, 'duration_secs': 0.496345} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.639308] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 735c5f4f-98c1-4c75-bb82-66e49b0233f6] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1122.639538] env[63345]: INFO nova.compute.manager [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 735c5f4f-98c1-4c75-bb82-66e49b0233f6] Took 7.79 seconds to spawn the instance on the hypervisor. 
[ 1122.639754] env[63345]: DEBUG nova.compute.manager [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 735c5f4f-98c1-4c75-bb82-66e49b0233f6] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1122.640558] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82e9640f-e72b-45e5-a0fb-da454020d037 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.886296] env[63345]: DEBUG nova.scheduler.client.report [None req-f6898dab-1e05-4402-9ae5-c028ff8a9823 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Updated inventory for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with generation 166 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1122.886598] env[63345]: DEBUG nova.compute.provider_tree [None req-f6898dab-1e05-4402-9ae5-c028ff8a9823 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Updating resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 generation from 166 to 167 during operation: update_inventory {{(pid=63345) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1122.886825] env[63345]: DEBUG nova.compute.provider_tree [None req-f6898dab-1e05-4402-9ae5-c028ff8a9823 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1123.167121] env[63345]: INFO nova.compute.manager [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 735c5f4f-98c1-4c75-bb82-66e49b0233f6] Took 13.46 seconds to build instance. 
[ 1123.391547] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f6898dab-1e05-4402-9ae5-c028ff8a9823 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.496s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1123.394671] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.656s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1123.398801] env[63345]: INFO nova.compute.claims [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1123.416765] env[63345]: INFO nova.scheduler.client.report [None req-f6898dab-1e05-4402-9ae5-c028ff8a9823 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Deleted allocations for instance 22a11cf9-8f85-4371-98eb-25b267c9aff7 [ 1123.608446] env[63345]: DEBUG nova.compute.manager [req-f01b2d73-e49c-4f89-abef-105f4c671000 req-8f59bb92-508e-4c31-9ca0-604ce0efcea1 service nova] [instance: 735c5f4f-98c1-4c75-bb82-66e49b0233f6] Received event network-changed-5e13d081-150b-4a13-a4c9-54ea78065ffe {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1123.608446] env[63345]: DEBUG nova.compute.manager [req-f01b2d73-e49c-4f89-abef-105f4c671000 req-8f59bb92-508e-4c31-9ca0-604ce0efcea1 service nova] [instance: 735c5f4f-98c1-4c75-bb82-66e49b0233f6] Refreshing instance network info cache due to event network-changed-5e13d081-150b-4a13-a4c9-54ea78065ffe. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 1123.608446] env[63345]: DEBUG oslo_concurrency.lockutils [req-f01b2d73-e49c-4f89-abef-105f4c671000 req-8f59bb92-508e-4c31-9ca0-604ce0efcea1 service nova] Acquiring lock "refresh_cache-735c5f4f-98c1-4c75-bb82-66e49b0233f6" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1123.608446] env[63345]: DEBUG oslo_concurrency.lockutils [req-f01b2d73-e49c-4f89-abef-105f4c671000 req-8f59bb92-508e-4c31-9ca0-604ce0efcea1 service nova] Acquired lock "refresh_cache-735c5f4f-98c1-4c75-bb82-66e49b0233f6" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1123.608446] env[63345]: DEBUG nova.network.neutron [req-f01b2d73-e49c-4f89-abef-105f4c671000 req-8f59bb92-508e-4c31-9ca0-604ce0efcea1 service nova] [instance: 735c5f4f-98c1-4c75-bb82-66e49b0233f6] Refreshing network info cache for port 5e13d081-150b-4a13-a4c9-54ea78065ffe {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1123.669730] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dcc7d2ee-09ed-4007-9c9f-b9de2d4685a6 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Lock "735c5f4f-98c1-4c75-bb82-66e49b0233f6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.976s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1123.670241] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Lock "735c5f4f-98c1-4c75-bb82-66e49b0233f6" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 5.717s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1123.670671] env[63345]: INFO nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 735c5f4f-98c1-4c75-bb82-66e49b0233f6] During sync_power_state the instance has a pending task (spawning). Skip. 
[ 1123.671238] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Lock "735c5f4f-98c1-4c75-bb82-66e49b0233f6" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1123.928623] env[63345]: DEBUG oslo_concurrency.lockutils [None req-f6898dab-1e05-4402-9ae5-c028ff8a9823 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Lock "22a11cf9-8f85-4371-98eb-25b267c9aff7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.685s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1123.929540] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Lock "22a11cf9-8f85-4371-98eb-25b267c9aff7" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 5.980s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1123.930616] env[63345]: INFO nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 22a11cf9-8f85-4371-98eb-25b267c9aff7] During sync_power_state the instance has a pending task (deleting). Skip. [ 1123.930616] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Lock "22a11cf9-8f85-4371-98eb-25b267c9aff7" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1124.424504] env[63345]: DEBUG nova.network.neutron [req-f01b2d73-e49c-4f89-abef-105f4c671000 req-8f59bb92-508e-4c31-9ca0-604ce0efcea1 service nova] [instance: 735c5f4f-98c1-4c75-bb82-66e49b0233f6] Updated VIF entry in instance network info cache for port 5e13d081-150b-4a13-a4c9-54ea78065ffe. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1124.424504] env[63345]: DEBUG nova.network.neutron [req-f01b2d73-e49c-4f89-abef-105f4c671000 req-8f59bb92-508e-4c31-9ca0-604ce0efcea1 service nova] [instance: 735c5f4f-98c1-4c75-bb82-66e49b0233f6] Updating instance_info_cache with network_info: [{"id": "5e13d081-150b-4a13-a4c9-54ea78065ffe", "address": "fa:16:3e:55:b1:df", "network": {"id": "372a3368-2d7a-4380-b811-7ad477d85250", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-454648225-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.193", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "41afa63287424a549133615eb390bac7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5b8af79a-31d5-4d78-93d7-3919aa1d9186", "external-id": "nsx-vlan-transportzone-324", "segmentation_id": 324, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5e13d081-15", "ovs_interfaceid": "5e13d081-150b-4a13-a4c9-54ea78065ffe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1124.531740] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d60c3e22-54e7-41e7-bb49-01b2ae0cf272 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.540327] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd70639e-8af1-4e85-ab1f-46684d1c81e7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.571781] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-425543a5-f3e3-44a5-ae67-cc22355d753a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.580837] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e5f6a82-07bb-4246-94f7-8b8c409c64c5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.598091] env[63345]: DEBUG nova.compute.provider_tree [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1124.928898] env[63345]: DEBUG oslo_concurrency.lockutils [req-f01b2d73-e49c-4f89-abef-105f4c671000 req-8f59bb92-508e-4c31-9ca0-604ce0efcea1 service nova] Releasing lock "refresh_cache-735c5f4f-98c1-4c75-bb82-66e49b0233f6" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1125.103035] env[63345]: DEBUG 
nova.scheduler.client.report [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1125.519717] env[63345]: DEBUG oslo_concurrency.lockutils [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Acquiring lock "c32818dc-b416-44c0-bcac-85b318a9cb84" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1125.520070] env[63345]: DEBUG oslo_concurrency.lockutils [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Lock "c32818dc-b416-44c0-bcac-85b318a9cb84" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1125.607898] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.214s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1125.608486] env[63345]: DEBUG nova.compute.manager [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Start building networks asynchronously for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 1125.913289] env[63345]: DEBUG oslo_concurrency.lockutils [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Acquiring lock "6b6ce545-0eca-4ef2-a859-c1e8ef978150" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1125.913530] env[63345]: DEBUG oslo_concurrency.lockutils [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Lock "6b6ce545-0eca-4ef2-a859-c1e8ef978150" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1126.022384] env[63345]: DEBUG nova.compute.manager [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] [instance: c32818dc-b416-44c0-bcac-85b318a9cb84] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 1126.113348] env[63345]: DEBUG nova.compute.utils [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1126.115502] env[63345]: DEBUG nova.compute.manager [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Allocating IP information in the background. {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1126.115502] env[63345]: DEBUG nova.network.neutron [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1126.161336] env[63345]: DEBUG nova.policy [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '22ef13d4324a4357bcbd6fc6d755c101', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4a7aaf150ea243b6a38a4b14f265bd4d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 1126.417997] env[63345]: DEBUG nova.compute.manager [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Starting instance... 
{{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 1126.443981] env[63345]: DEBUG nova.network.neutron [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Successfully created port: 4244898c-6ed5-4ae5-9bdb-12a31a9d8a9b {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1126.547443] env[63345]: DEBUG oslo_concurrency.lockutils [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1126.547706] env[63345]: DEBUG oslo_concurrency.lockutils [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1126.549229] env[63345]: INFO nova.compute.claims [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] [instance: c32818dc-b416-44c0-bcac-85b318a9cb84] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1126.618318] env[63345]: DEBUG nova.compute.manager [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Start building block device mappings for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 1126.940178] env[63345]: DEBUG oslo_concurrency.lockutils [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1127.628265] env[63345]: DEBUG nova.compute.manager [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Start spawning the instance on the hypervisor. 
{{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 1127.657543] env[63345]: DEBUG nova.virt.hardware [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1127.657790] env[63345]: DEBUG nova.virt.hardware [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1127.657954] env[63345]: DEBUG nova.virt.hardware [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1127.658170] env[63345]: DEBUG nova.virt.hardware [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1127.658326] env[63345]: DEBUG nova.virt.hardware [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1127.658478] env[63345]: DEBUG nova.virt.hardware [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1127.658688] env[63345]: DEBUG nova.virt.hardware [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1127.658856] env[63345]: DEBUG nova.virt.hardware [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1127.659041] env[63345]: DEBUG nova.virt.hardware [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1127.659217] env[63345]: DEBUG nova.virt.hardware [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1127.659485] env[63345]: DEBUG nova.virt.hardware [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1127.661013] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30c5db5c-e28e-4316-9389-95f44af72670 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.664959] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f35fee31-58c5-4731-b48a-13678c8fa031 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.674368] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7795c83a-f15a-457a-a1d0-8e64c3f06e51 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.678541] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a8d248f-fde3-47f9-8e81-fc4ee10f8e56 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.718256] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f068a6b-b38a-4788-a458-a23b4024c611 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.726501] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08312ee2-fcfb-441c-afb9-8f90ebd8e5ad {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.739907] env[63345]: DEBUG nova.compute.provider_tree [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1127.860783] env[63345]: DEBUG nova.compute.manager [req-0606ad1d-d68c-4632-ad12-a2ee0738b70a req-b6f4fb3f-d77e-4ac0-ad95-21fe1199d29e service nova] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Received event network-vif-plugged-4244898c-6ed5-4ae5-9bdb-12a31a9d8a9b {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1127.860977] env[63345]: DEBUG 
oslo_concurrency.lockutils [req-0606ad1d-d68c-4632-ad12-a2ee0738b70a req-b6f4fb3f-d77e-4ac0-ad95-21fe1199d29e service nova] Acquiring lock "937ba0d6-bf23-45ae-8d75-cd7559e436f5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1127.861230] env[63345]: DEBUG oslo_concurrency.lockutils [req-0606ad1d-d68c-4632-ad12-a2ee0738b70a req-b6f4fb3f-d77e-4ac0-ad95-21fe1199d29e service nova] Lock "937ba0d6-bf23-45ae-8d75-cd7559e436f5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1127.861409] env[63345]: DEBUG oslo_concurrency.lockutils [req-0606ad1d-d68c-4632-ad12-a2ee0738b70a req-b6f4fb3f-d77e-4ac0-ad95-21fe1199d29e service nova] Lock "937ba0d6-bf23-45ae-8d75-cd7559e436f5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1127.861581] env[63345]: DEBUG nova.compute.manager [req-0606ad1d-d68c-4632-ad12-a2ee0738b70a req-b6f4fb3f-d77e-4ac0-ad95-21fe1199d29e service nova] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] No waiting events found dispatching network-vif-plugged-4244898c-6ed5-4ae5-9bdb-12a31a9d8a9b {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1127.861747] env[63345]: WARNING nova.compute.manager [req-0606ad1d-d68c-4632-ad12-a2ee0738b70a req-b6f4fb3f-d77e-4ac0-ad95-21fe1199d29e service nova] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Received unexpected event network-vif-plugged-4244898c-6ed5-4ae5-9bdb-12a31a9d8a9b for instance with vm_state building and task_state spawning. 
[ 1127.961822] env[63345]: DEBUG nova.network.neutron [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Successfully updated port: 4244898c-6ed5-4ae5-9bdb-12a31a9d8a9b {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1128.242716] env[63345]: DEBUG nova.scheduler.client.report [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1128.464872] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Acquiring lock "refresh_cache-937ba0d6-bf23-45ae-8d75-cd7559e436f5" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1128.465072] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Acquired lock "refresh_cache-937ba0d6-bf23-45ae-8d75-cd7559e436f5" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1128.465238] env[63345]: DEBUG nova.network.neutron [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1128.748520] env[63345]: DEBUG oslo_concurrency.lockutils [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.201s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1128.749030] env[63345]: DEBUG nova.compute.manager [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] [instance: c32818dc-b416-44c0-bcac-85b318a9cb84] Start building networks asynchronously for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 1128.751941] env[63345]: DEBUG oslo_concurrency.lockutils [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.812s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1128.754525] env[63345]: INFO nova.compute.claims [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1128.996676] env[63345]: DEBUG nova.network.neutron [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1129.121873] env[63345]: DEBUG nova.network.neutron [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Updating instance_info_cache with network_info: [{"id": "4244898c-6ed5-4ae5-9bdb-12a31a9d8a9b", "address": "fa:16:3e:0e:aa:f1", "network": {"id": "04c13a40-3e24-45e3-b045-adb1f5b0ad03", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1754460710-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a7aaf150ea243b6a38a4b14f265bd4d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3753f451-fa23-4988-9361-074fb0bd3fd4", "external-id": "nsx-vlan-transportzone-440", "segmentation_id": 440, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4244898c-6e", "ovs_interfaceid": "4244898c-6ed5-4ae5-9bdb-12a31a9d8a9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1129.261535] env[63345]: DEBUG nova.compute.utils [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1129.262875] env[63345]: DEBUG nova.compute.manager [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] [instance: c32818dc-b416-44c0-bcac-85b318a9cb84] Allocating IP information in the background. 
{{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1129.263063] env[63345]: DEBUG nova.network.neutron [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] [instance: c32818dc-b416-44c0-bcac-85b318a9cb84] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1129.300074] env[63345]: DEBUG nova.policy [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ad39d3db879648bab57b7cac73ef7c9a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1e1eaba3c6a3457b9efe8d599a1a0ef9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 1129.534989] env[63345]: DEBUG nova.network.neutron [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] [instance: c32818dc-b416-44c0-bcac-85b318a9cb84] Successfully created port: 59a71e22-f87b-421e-a3e0-1c3fa40e695e {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1129.624880] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Releasing lock "refresh_cache-937ba0d6-bf23-45ae-8d75-cd7559e436f5" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1129.625240] env[63345]: DEBUG nova.compute.manager [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Instance network_info: |[{"id": "4244898c-6ed5-4ae5-9bdb-12a31a9d8a9b", "address": "fa:16:3e:0e:aa:f1", "network": {"id": "04c13a40-3e24-45e3-b045-adb1f5b0ad03", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1754460710-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a7aaf150ea243b6a38a4b14f265bd4d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3753f451-fa23-4988-9361-074fb0bd3fd4", "external-id": "nsx-vlan-transportzone-440", "segmentation_id": 440, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4244898c-6e", "ovs_interfaceid": "4244898c-6ed5-4ae5-9bdb-12a31a9d8a9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2002}} [ 1129.625695] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0e:aa:f1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3753f451-fa23-4988-9361-074fb0bd3fd4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4244898c-6ed5-4ae5-9bdb-12a31a9d8a9b', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1129.633309] env[63345]: DEBUG oslo.service.loopingcall [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1129.633528] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1129.633750] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a19b238e-6cf3-4206-944d-a07c76c1250b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.654533] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1129.654533] env[63345]: value = "task-1017927" [ 1129.654533] env[63345]: _type = "Task" [ 1129.654533] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.662522] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017927, 'name': CreateVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.766470] env[63345]: DEBUG nova.compute.manager [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] [instance: c32818dc-b416-44c0-bcac-85b318a9cb84] Start building block device mappings for instance. 
{{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 1129.877117] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3df26737-bff1-418a-a359-df44bf5b55b7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.887640] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ab2c10f-0240-4266-a832-3093653f1874 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.893170] env[63345]: DEBUG nova.compute.manager [req-169d44ee-ecac-4674-beb3-24fb8660c448 req-57a6f7d4-5c94-4623-9893-999dee5b5ed7 service nova] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Received event network-changed-4244898c-6ed5-4ae5-9bdb-12a31a9d8a9b {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1129.893544] env[63345]: DEBUG nova.compute.manager [req-169d44ee-ecac-4674-beb3-24fb8660c448 req-57a6f7d4-5c94-4623-9893-999dee5b5ed7 service nova] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Refreshing instance network info cache due to event network-changed-4244898c-6ed5-4ae5-9bdb-12a31a9d8a9b. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 1129.893749] env[63345]: DEBUG oslo_concurrency.lockutils [req-169d44ee-ecac-4674-beb3-24fb8660c448 req-57a6f7d4-5c94-4623-9893-999dee5b5ed7 service nova] Acquiring lock "refresh_cache-937ba0d6-bf23-45ae-8d75-cd7559e436f5" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1129.893990] env[63345]: DEBUG oslo_concurrency.lockutils [req-169d44ee-ecac-4674-beb3-24fb8660c448 req-57a6f7d4-5c94-4623-9893-999dee5b5ed7 service nova] Acquired lock "refresh_cache-937ba0d6-bf23-45ae-8d75-cd7559e436f5" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1129.894279] env[63345]: DEBUG nova.network.neutron [req-169d44ee-ecac-4674-beb3-24fb8660c448 req-57a6f7d4-5c94-4623-9893-999dee5b5ed7 service nova] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Refreshing network info cache for port 4244898c-6ed5-4ae5-9bdb-12a31a9d8a9b {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1129.926789] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fdc6701-240d-44ad-9357-fb86e3f203cb {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.937292] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11110d8b-ff70-4c8a-9650-4917af39e453 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.954166] env[63345]: DEBUG nova.compute.provider_tree [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1130.166027] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017927, 'name': CreateVM_Task, 'duration_secs': 0.309742} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1130.166027] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1130.166718] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1130.166871] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1130.167936] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1130.167936] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cd7227fc-7851-4e9d-b82e-45806fd79e91 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.173639] env[63345]: DEBUG oslo_vmware.api [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Waiting for the task: (returnval){ [ 1130.173639] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]5297a8f9-6b3c-de14-0dc4-2d91be518f0e" [ 1130.173639] env[63345]: _type = "Task" [ 1130.173639] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1130.182447] env[63345]: DEBUG oslo_vmware.api [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5297a8f9-6b3c-de14-0dc4-2d91be518f0e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.456885] env[63345]: DEBUG nova.scheduler.client.report [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1130.604889] env[63345]: DEBUG nova.network.neutron [req-169d44ee-ecac-4674-beb3-24fb8660c448 req-57a6f7d4-5c94-4623-9893-999dee5b5ed7 service nova] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Updated VIF entry in instance network info cache for port 4244898c-6ed5-4ae5-9bdb-12a31a9d8a9b. {{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1130.606102] env[63345]: DEBUG nova.network.neutron [req-169d44ee-ecac-4674-beb3-24fb8660c448 req-57a6f7d4-5c94-4623-9893-999dee5b5ed7 service nova] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Updating instance_info_cache with network_info: [{"id": "4244898c-6ed5-4ae5-9bdb-12a31a9d8a9b", "address": "fa:16:3e:0e:aa:f1", "network": {"id": "04c13a40-3e24-45e3-b045-adb1f5b0ad03", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1754460710-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a7aaf150ea243b6a38a4b14f265bd4d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3753f451-fa23-4988-9361-074fb0bd3fd4", "external-id": "nsx-vlan-transportzone-440", "segmentation_id": 440, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4244898c-6e", "ovs_interfaceid": "4244898c-6ed5-4ae5-9bdb-12a31a9d8a9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1130.683571] env[63345]: DEBUG oslo_vmware.api [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5297a8f9-6b3c-de14-0dc4-2d91be518f0e, 'name': SearchDatastore_Task, 'duration_secs': 0.010086} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1130.683876] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1130.684161] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1130.684407] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1130.684563] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1130.684747] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1130.685039] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-990a9a17-5336-445c-a0b6-905358ede727 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.692651] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1130.692827] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1130.693502] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c5e16bd9-80e2-4669-b9f7-e5836cb5fe08 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.698321] env[63345]: DEBUG oslo_vmware.api [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Waiting for the task: (returnval){ [ 1130.698321] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52c15cc8-1fed-9dba-ad42-4a9658550066" [ 1130.698321] env[63345]: _type = "Task" [ 1130.698321] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1130.705322] env[63345]: DEBUG oslo_vmware.api [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52c15cc8-1fed-9dba-ad42-4a9658550066, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.775515] env[63345]: DEBUG nova.compute.manager [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] [instance: c32818dc-b416-44c0-bcac-85b318a9cb84] Start spawning the instance on the hypervisor. {{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 1130.801762] env[63345]: DEBUG nova.virt.hardware [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1130.801963] env[63345]: DEBUG nova.virt.hardware [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1130.802190] env[63345]: DEBUG nova.virt.hardware [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1130.802393] env[63345]: DEBUG 
nova.virt.hardware [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1130.802549] env[63345]: DEBUG nova.virt.hardware [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1130.802706] env[63345]: DEBUG nova.virt.hardware [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1130.802919] env[63345]: DEBUG nova.virt.hardware [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1130.803095] env[63345]: DEBUG nova.virt.hardware [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1130.803271] env[63345]: DEBUG nova.virt.hardware [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1130.803463] env[63345]: DEBUG nova.virt.hardware [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1130.803611] env[63345]: DEBUG nova.virt.hardware [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1130.804516] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be14abcf-ad74-48e9-98b2-070341f24516 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.812231] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2695b20a-42a6-46d2-a8a6-7e3b220fbb9e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.965022] env[63345]: DEBUG oslo_concurrency.lockutils [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 
tempest-AttachVolumeTestJSON-569378185-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.210s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1130.965022] env[63345]: DEBUG nova.compute.manager [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Start building networks asynchronously for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 1130.995444] env[63345]: DEBUG nova.network.neutron [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] [instance: c32818dc-b416-44c0-bcac-85b318a9cb84] Successfully updated port: 59a71e22-f87b-421e-a3e0-1c3fa40e695e {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1131.107973] env[63345]: DEBUG oslo_concurrency.lockutils [req-169d44ee-ecac-4674-beb3-24fb8660c448 req-57a6f7d4-5c94-4623-9893-999dee5b5ed7 service nova] Releasing lock "refresh_cache-937ba0d6-bf23-45ae-8d75-cd7559e436f5" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1131.208277] env[63345]: DEBUG oslo_vmware.api [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52c15cc8-1fed-9dba-ad42-4a9658550066, 'name': SearchDatastore_Task, 'duration_secs': 0.007721} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1131.209009] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7c915838-9190-47c5-b5bb-c852215c1923 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.213783] env[63345]: DEBUG oslo_vmware.api [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Waiting for the task: (returnval){ [ 1131.213783] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52f5913c-20cc-7367-6d77-d30c39df20e4" [ 1131.213783] env[63345]: _type = "Task" [ 1131.213783] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1131.222279] env[63345]: DEBUG oslo_vmware.api [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52f5913c-20cc-7367-6d77-d30c39df20e4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.469581] env[63345]: DEBUG nova.compute.utils [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1131.470999] env[63345]: DEBUG nova.compute.manager [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Allocating IP information in the background. {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1131.471190] env[63345]: DEBUG nova.network.neutron [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1131.501452] env[63345]: DEBUG oslo_concurrency.lockutils [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Acquiring lock "refresh_cache-c32818dc-b416-44c0-bcac-85b318a9cb84" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1131.502166] env[63345]: DEBUG oslo_concurrency.lockutils [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Acquired lock "refresh_cache-c32818dc-b416-44c0-bcac-85b318a9cb84" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1131.502166] env[63345]: DEBUG nova.network.neutron [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] [instance: c32818dc-b416-44c0-bcac-85b318a9cb84] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1131.511328] env[63345]: DEBUG nova.policy [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f30f1732c89e4f1d87bc564854295c21', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4560e378b6aa47a3bbb5a2f7c5b76f5f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 1131.725326] env[63345]: DEBUG oslo_vmware.api [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52f5913c-20cc-7367-6d77-d30c39df20e4, 'name': SearchDatastore_Task, 'duration_secs': 0.009119} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1131.725621] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1131.725884] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 937ba0d6-bf23-45ae-8d75-cd7559e436f5/937ba0d6-bf23-45ae-8d75-cd7559e436f5.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1131.726170] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d4dc006a-0f12-45f5-ab6e-80fe5d29d80f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.733209] env[63345]: DEBUG oslo_vmware.api [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Waiting for the task: (returnval){ [ 1131.733209] env[63345]: value = "task-1017928" [ 1131.733209] env[63345]: _type = "Task" [ 1131.733209] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1131.741110] env[63345]: DEBUG oslo_vmware.api [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017928, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.787459] env[63345]: DEBUG nova.network.neutron [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Successfully created port: c20cba8e-091b-4afd-9e5e-4d87441d4aea {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1131.929362] env[63345]: DEBUG nova.compute.manager [req-76b1f620-5d34-4989-b642-538d5d4c5a6b req-de665d1e-d75b-479e-9138-e94416925d76 service nova] [instance: c32818dc-b416-44c0-bcac-85b318a9cb84] Received event network-vif-plugged-59a71e22-f87b-421e-a3e0-1c3fa40e695e {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1131.929362] env[63345]: DEBUG oslo_concurrency.lockutils [req-76b1f620-5d34-4989-b642-538d5d4c5a6b req-de665d1e-d75b-479e-9138-e94416925d76 service nova] Acquiring lock "c32818dc-b416-44c0-bcac-85b318a9cb84-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1131.929362] env[63345]: DEBUG oslo_concurrency.lockutils [req-76b1f620-5d34-4989-b642-538d5d4c5a6b req-de665d1e-d75b-479e-9138-e94416925d76 service nova] Lock "c32818dc-b416-44c0-bcac-85b318a9cb84-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1131.929362] env[63345]: DEBUG oslo_concurrency.lockutils [req-76b1f620-5d34-4989-b642-538d5d4c5a6b req-de665d1e-d75b-479e-9138-e94416925d76 service nova] Lock "c32818dc-b416-44c0-bcac-85b318a9cb84-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1131.929362] env[63345]: DEBUG nova.compute.manager [req-76b1f620-5d34-4989-b642-538d5d4c5a6b req-de665d1e-d75b-479e-9138-e94416925d76 service nova] [instance: c32818dc-b416-44c0-bcac-85b318a9cb84] No waiting events found dispatching network-vif-plugged-59a71e22-f87b-421e-a3e0-1c3fa40e695e {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1131.929362] env[63345]: WARNING nova.compute.manager [req-76b1f620-5d34-4989-b642-538d5d4c5a6b req-de665d1e-d75b-479e-9138-e94416925d76 service nova] [instance: c32818dc-b416-44c0-bcac-85b318a9cb84] Received unexpected event network-vif-plugged-59a71e22-f87b-421e-a3e0-1c3fa40e695e for instance with vm_state building and task_state spawning. [ 1131.929362] env[63345]: DEBUG nova.compute.manager [req-76b1f620-5d34-4989-b642-538d5d4c5a6b req-de665d1e-d75b-479e-9138-e94416925d76 service nova] [instance: c32818dc-b416-44c0-bcac-85b318a9cb84] Received event network-changed-59a71e22-f87b-421e-a3e0-1c3fa40e695e {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1131.929362] env[63345]: DEBUG nova.compute.manager [req-76b1f620-5d34-4989-b642-538d5d4c5a6b req-de665d1e-d75b-479e-9138-e94416925d76 service nova] [instance: c32818dc-b416-44c0-bcac-85b318a9cb84] Refreshing instance network info cache due to event network-changed-59a71e22-f87b-421e-a3e0-1c3fa40e695e. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 1131.929362] env[63345]: DEBUG oslo_concurrency.lockutils [req-76b1f620-5d34-4989-b642-538d5d4c5a6b req-de665d1e-d75b-479e-9138-e94416925d76 service nova] Acquiring lock "refresh_cache-c32818dc-b416-44c0-bcac-85b318a9cb84" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1131.974657] env[63345]: DEBUG nova.compute.manager [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Start building block device mappings for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 1132.039379] env[63345]: DEBUG nova.network.neutron [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] [instance: c32818dc-b416-44c0-bcac-85b318a9cb84] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1132.177525] env[63345]: DEBUG nova.network.neutron [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] [instance: c32818dc-b416-44c0-bcac-85b318a9cb84] Updating instance_info_cache with network_info: [{"id": "59a71e22-f87b-421e-a3e0-1c3fa40e695e", "address": "fa:16:3e:71:f2:e2", "network": {"id": "b418e525-8db2-4b16-835b-925725fc7f9f", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-691762602-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e1eaba3c6a3457b9efe8d599a1a0ef9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cca1f087-01e1-49ca-831b-5c51478a5d60", "external-id": "nsx-vlan-transportzone-439", "segmentation_id": 439, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap59a71e22-f8", "ovs_interfaceid": "59a71e22-f87b-421e-a3e0-1c3fa40e695e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1132.242951] env[63345]: DEBUG oslo_vmware.api [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017928, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.475381} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1132.243225] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 937ba0d6-bf23-45ae-8d75-cd7559e436f5/937ba0d6-bf23-45ae-8d75-cd7559e436f5.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1132.243448] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1132.244095] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-45360b57-27b8-4197-9c6e-936bb5fbaae9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.249970] env[63345]: DEBUG oslo_vmware.api [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Waiting for the task: (returnval){ [ 1132.249970] env[63345]: value = "task-1017929" [ 1132.249970] env[63345]: _type = "Task" [ 1132.249970] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1132.257997] env[63345]: DEBUG oslo_vmware.api [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017929, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.679932] env[63345]: DEBUG oslo_concurrency.lockutils [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Releasing lock "refresh_cache-c32818dc-b416-44c0-bcac-85b318a9cb84" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1132.680227] env[63345]: DEBUG nova.compute.manager [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] [instance: c32818dc-b416-44c0-bcac-85b318a9cb84] Instance network_info: |[{"id": "59a71e22-f87b-421e-a3e0-1c3fa40e695e", "address": "fa:16:3e:71:f2:e2", "network": {"id": "b418e525-8db2-4b16-835b-925725fc7f9f", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-691762602-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e1eaba3c6a3457b9efe8d599a1a0ef9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cca1f087-01e1-49ca-831b-5c51478a5d60", "external-id": "nsx-vlan-transportzone-439", "segmentation_id": 439, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap59a71e22-f8", "ovs_interfaceid": "59a71e22-f87b-421e-a3e0-1c3fa40e695e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 1132.680580] env[63345]: DEBUG oslo_concurrency.lockutils [req-76b1f620-5d34-4989-b642-538d5d4c5a6b req-de665d1e-d75b-479e-9138-e94416925d76 service nova] Acquired lock "refresh_cache-c32818dc-b416-44c0-bcac-85b318a9cb84" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1132.680774] env[63345]: DEBUG nova.network.neutron [req-76b1f620-5d34-4989-b642-538d5d4c5a6b req-de665d1e-d75b-479e-9138-e94416925d76 service nova] [instance: c32818dc-b416-44c0-bcac-85b318a9cb84] Refreshing network info cache for port 59a71e22-f87b-421e-a3e0-1c3fa40e695e {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1132.682049] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] [instance: c32818dc-b416-44c0-bcac-85b318a9cb84] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:71:f2:e2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cca1f087-01e1-49ca-831b-5c51478a5d60', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '59a71e22-f87b-421e-a3e0-1c3fa40e695e', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1132.689324] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 
tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Creating folder: Project (1e1eaba3c6a3457b9efe8d599a1a0ef9). Parent ref: group-v225918. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1132.692324] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6e73b80a-0a8f-4ec0-9914-7a22f8dcd0f7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.704496] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Created folder: Project (1e1eaba3c6a3457b9efe8d599a1a0ef9) in parent group-v225918. [ 1132.704685] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Creating folder: Instances. Parent ref: group-v226177. {{(pid=63345) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1132.704913] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-78ff04bd-dd66-4d31-b39e-b7e699e1bcf3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.713369] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Created folder: Instances in parent group-v226177. [ 1132.713588] env[63345]: DEBUG oslo.service.loopingcall [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1132.713766] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c32818dc-b416-44c0-bcac-85b318a9cb84] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1132.713983] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8b6fa0dc-96e2-4dfd-8418-3fdd5673329f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.735874] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1132.735874] env[63345]: value = "task-1017932" [ 1132.735874] env[63345]: _type = "Task" [ 1132.735874] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1132.745715] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017932, 'name': CreateVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.758594] env[63345]: DEBUG oslo_vmware.api [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017929, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.052345} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1132.758849] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1132.759591] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd11efa2-9d72-4fdf-8edf-a5cd95d53599 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.783562] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Reconfiguring VM instance instance-00000073 to attach disk [datastore2] 937ba0d6-bf23-45ae-8d75-cd7559e436f5/937ba0d6-bf23-45ae-8d75-cd7559e436f5.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1132.786064] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bac22f52-ae61-4cbf-8c23-0bb0374241bb {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.805617] env[63345]: DEBUG oslo_vmware.api [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Waiting for the task: (returnval){ [ 1132.805617] env[63345]: value = "task-1017933" [ 1132.805617] env[63345]: _type = "Task" [ 1132.805617] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1132.813469] env[63345]: DEBUG oslo_vmware.api [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017933, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.960469] env[63345]: DEBUG nova.network.neutron [req-76b1f620-5d34-4989-b642-538d5d4c5a6b req-de665d1e-d75b-479e-9138-e94416925d76 service nova] [instance: c32818dc-b416-44c0-bcac-85b318a9cb84] Updated VIF entry in instance network info cache for port 59a71e22-f87b-421e-a3e0-1c3fa40e695e. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1132.960834] env[63345]: DEBUG nova.network.neutron [req-76b1f620-5d34-4989-b642-538d5d4c5a6b req-de665d1e-d75b-479e-9138-e94416925d76 service nova] [instance: c32818dc-b416-44c0-bcac-85b318a9cb84] Updating instance_info_cache with network_info: [{"id": "59a71e22-f87b-421e-a3e0-1c3fa40e695e", "address": "fa:16:3e:71:f2:e2", "network": {"id": "b418e525-8db2-4b16-835b-925725fc7f9f", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-691762602-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e1eaba3c6a3457b9efe8d599a1a0ef9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cca1f087-01e1-49ca-831b-5c51478a5d60", "external-id": "nsx-vlan-transportzone-439", "segmentation_id": 439, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap59a71e22-f8", "ovs_interfaceid": "59a71e22-f87b-421e-a3e0-1c3fa40e695e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1132.983744] env[63345]: DEBUG nova.compute.manager [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Start spawning the instance on the hypervisor. 
{{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 1133.008987] env[63345]: DEBUG nova.virt.hardware [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1133.009234] env[63345]: DEBUG nova.virt.hardware [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1133.009449] env[63345]: DEBUG nova.virt.hardware [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1133.009680] env[63345]: DEBUG nova.virt.hardware [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1133.009867] env[63345]: DEBUG nova.virt.hardware [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1133.010063] env[63345]: DEBUG nova.virt.hardware [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1133.010320] env[63345]: DEBUG nova.virt.hardware [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1133.010519] env[63345]: DEBUG nova.virt.hardware [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1133.010725] env[63345]: DEBUG nova.virt.hardware [None 
req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1133.010936] env[63345]: DEBUG nova.virt.hardware [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1133.011153] env[63345]: DEBUG nova.virt.hardware [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1133.012073] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7408fa5c-ce8b-4a90-a7c6-d9b4b57a81eb {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.019740] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6994e8f3-08a0-4e09-8d6d-0b817c78a837 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.204184] env[63345]: DEBUG nova.compute.manager [req-fb362cd9-5881-492e-9628-3e2a89d609f9 req-d7407e21-b90f-4c7e-a210-fe7d68cf6630 service nova] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Received event network-vif-plugged-c20cba8e-091b-4afd-9e5e-4d87441d4aea {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1133.204184] env[63345]: DEBUG oslo_concurrency.lockutils [req-fb362cd9-5881-492e-9628-3e2a89d609f9 req-d7407e21-b90f-4c7e-a210-fe7d68cf6630 service nova] Acquiring lock "6b6ce545-0eca-4ef2-a859-c1e8ef978150-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1133.204184] env[63345]: DEBUG oslo_concurrency.lockutils [req-fb362cd9-5881-492e-9628-3e2a89d609f9 req-d7407e21-b90f-4c7e-a210-fe7d68cf6630 service nova] Lock "6b6ce545-0eca-4ef2-a859-c1e8ef978150-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1133.204184] env[63345]: DEBUG oslo_concurrency.lockutils [req-fb362cd9-5881-492e-9628-3e2a89d609f9 req-d7407e21-b90f-4c7e-a210-fe7d68cf6630 service nova] Lock "6b6ce545-0eca-4ef2-a859-c1e8ef978150-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1133.204184] env[63345]: DEBUG nova.compute.manager [req-fb362cd9-5881-492e-9628-3e2a89d609f9 req-d7407e21-b90f-4c7e-a210-fe7d68cf6630 service nova] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] No waiting events found dispatching network-vif-plugged-c20cba8e-091b-4afd-9e5e-4d87441d4aea {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1133.204184] env[63345]: WARNING nova.compute.manager [req-fb362cd9-5881-492e-9628-3e2a89d609f9 
req-d7407e21-b90f-4c7e-a210-fe7d68cf6630 service nova] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Received unexpected event network-vif-plugged-c20cba8e-091b-4afd-9e5e-4d87441d4aea for instance with vm_state building and task_state spawning. [ 1133.246340] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017932, 'name': CreateVM_Task} progress is 25%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.314987] env[63345]: DEBUG oslo_vmware.api [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017933, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.463236] env[63345]: DEBUG oslo_concurrency.lockutils [req-76b1f620-5d34-4989-b642-538d5d4c5a6b req-de665d1e-d75b-479e-9138-e94416925d76 service nova] Releasing lock "refresh_cache-c32818dc-b416-44c0-bcac-85b318a9cb84" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1133.745834] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017932, 'name': CreateVM_Task, 'duration_secs': 0.878866} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1133.745998] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c32818dc-b416-44c0-bcac-85b318a9cb84] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1133.746727] env[63345]: DEBUG oslo_concurrency.lockutils [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1133.746891] env[63345]: DEBUG oslo_concurrency.lockutils [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1133.747233] env[63345]: DEBUG oslo_concurrency.lockutils [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1133.747487] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ad2a88e7-dd43-4d22-86f0-62df5cec83b8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.751912] env[63345]: DEBUG oslo_vmware.api [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Waiting for the task: (returnval){ [ 1133.751912] env[63345]: value = 
"session[52090a46-d3fa-1435-f12f-c4737ae78030]528eacc2-ad25-f8ac-dd0e-79e4fbaac7a1" [ 1133.751912] env[63345]: _type = "Task" [ 1133.751912] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.759224] env[63345]: DEBUG oslo_vmware.api [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]528eacc2-ad25-f8ac-dd0e-79e4fbaac7a1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.814525] env[63345]: DEBUG oslo_vmware.api [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017933, 'name': ReconfigVM_Task, 'duration_secs': 0.860418} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1133.814811] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Reconfigured VM instance instance-00000073 to attach disk [datastore2] 937ba0d6-bf23-45ae-8d75-cd7559e436f5/937ba0d6-bf23-45ae-8d75-cd7559e436f5.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1133.815430] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ed08405c-a813-4905-9f07-b447644bdb39 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.821074] env[63345]: DEBUG oslo_vmware.api [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Waiting for the task: (returnval){ [ 1133.821074] env[63345]: value = "task-1017934" [ 1133.821074] env[63345]: _type = "Task" [ 1133.821074] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.827972] env[63345]: DEBUG oslo_vmware.api [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017934, 'name': Rename_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.892270] env[63345]: DEBUG nova.network.neutron [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Successfully updated port: c20cba8e-091b-4afd-9e5e-4d87441d4aea {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1133.950153] env[63345]: DEBUG nova.compute.manager [req-dae880f6-f2a8-4b5b-9189-a0f68c998aab req-5514c2f1-6277-4563-8e02-9634d06c5f69 service nova] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Received event network-changed-c20cba8e-091b-4afd-9e5e-4d87441d4aea {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1133.950369] env[63345]: DEBUG nova.compute.manager [req-dae880f6-f2a8-4b5b-9189-a0f68c998aab req-5514c2f1-6277-4563-8e02-9634d06c5f69 service nova] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Refreshing instance network info cache due to event network-changed-c20cba8e-091b-4afd-9e5e-4d87441d4aea. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 1133.950586] env[63345]: DEBUG oslo_concurrency.lockutils [req-dae880f6-f2a8-4b5b-9189-a0f68c998aab req-5514c2f1-6277-4563-8e02-9634d06c5f69 service nova] Acquiring lock "refresh_cache-6b6ce545-0eca-4ef2-a859-c1e8ef978150" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1133.950735] env[63345]: DEBUG oslo_concurrency.lockutils [req-dae880f6-f2a8-4b5b-9189-a0f68c998aab req-5514c2f1-6277-4563-8e02-9634d06c5f69 service nova] Acquired lock "refresh_cache-6b6ce545-0eca-4ef2-a859-c1e8ef978150" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1133.950900] env[63345]: DEBUG nova.network.neutron [req-dae880f6-f2a8-4b5b-9189-a0f68c998aab req-5514c2f1-6277-4563-8e02-9634d06c5f69 service nova] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Refreshing network info cache for port c20cba8e-091b-4afd-9e5e-4d87441d4aea {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1134.262895] env[63345]: DEBUG oslo_vmware.api [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]528eacc2-ad25-f8ac-dd0e-79e4fbaac7a1, 'name': SearchDatastore_Task, 'duration_secs': 0.008186} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.263238] env[63345]: DEBUG oslo_concurrency.lockutils [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1134.263578] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] [instance: c32818dc-b416-44c0-bcac-85b318a9cb84] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1134.263842] env[63345]: DEBUG oslo_concurrency.lockutils [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1134.264038] env[63345]: DEBUG oslo_concurrency.lockutils [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1134.264265] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1134.264480] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4cb91244-b1b9-4cba-83b3-588d87e76c57 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.272502] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1134.272693] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1134.273501] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c2d60316-e05f-41ba-bcbf-0578a01fce08 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.278468] env[63345]: DEBUG oslo_vmware.api [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Waiting for the task: (returnval){ [ 1134.278468] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52951759-b638-926f-4600-45a257adc8fd" [ 1134.278468] env[63345]: _type = "Task" [ 1134.278468] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.286339] env[63345]: DEBUG oslo_vmware.api [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52951759-b638-926f-4600-45a257adc8fd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.329961] env[63345]: DEBUG oslo_vmware.api [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017934, 'name': Rename_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.394874] env[63345]: DEBUG oslo_concurrency.lockutils [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Acquiring lock "refresh_cache-6b6ce545-0eca-4ef2-a859-c1e8ef978150" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1134.483600] env[63345]: DEBUG nova.network.neutron [req-dae880f6-f2a8-4b5b-9189-a0f68c998aab req-5514c2f1-6277-4563-8e02-9634d06c5f69 service nova] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1134.554459] env[63345]: DEBUG nova.network.neutron [req-dae880f6-f2a8-4b5b-9189-a0f68c998aab req-5514c2f1-6277-4563-8e02-9634d06c5f69 service nova] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1134.788696] env[63345]: DEBUG oslo_vmware.api [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52951759-b638-926f-4600-45a257adc8fd, 'name': SearchDatastore_Task, 'duration_secs': 0.008515} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.789477] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-116fd612-352a-426d-88d2-980524d24e11 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.794343] env[63345]: DEBUG oslo_vmware.api [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Waiting for the task: (returnval){ [ 1134.794343] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52320d68-772f-6a6d-17a8-1a6cddd89005" [ 1134.794343] env[63345]: _type = "Task" [ 1134.794343] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.803423] env[63345]: DEBUG oslo_vmware.api [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52320d68-772f-6a6d-17a8-1a6cddd89005, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.830632] env[63345]: DEBUG oslo_vmware.api [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017934, 'name': Rename_Task, 'duration_secs': 0.927237} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.830945] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1134.831167] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8b05643c-564a-44e3-933e-fa3ecf48c1a7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.837334] env[63345]: DEBUG oslo_vmware.api [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Waiting for the task: (returnval){ [ 1134.837334] env[63345]: value = "task-1017935" [ 1134.837334] env[63345]: _type = "Task" [ 1134.837334] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.850891] env[63345]: DEBUG oslo_vmware.api [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017935, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.058800] env[63345]: DEBUG oslo_concurrency.lockutils [req-dae880f6-f2a8-4b5b-9189-a0f68c998aab req-5514c2f1-6277-4563-8e02-9634d06c5f69 service nova] Releasing lock "refresh_cache-6b6ce545-0eca-4ef2-a859-c1e8ef978150" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1135.058800] env[63345]: DEBUG oslo_concurrency.lockutils [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Acquired lock "refresh_cache-6b6ce545-0eca-4ef2-a859-c1e8ef978150" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1135.058800] env[63345]: DEBUG nova.network.neutron [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1135.305747] env[63345]: DEBUG oslo_vmware.api [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52320d68-772f-6a6d-17a8-1a6cddd89005, 'name': SearchDatastore_Task, 'duration_secs': 0.019242} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1135.306089] env[63345]: DEBUG oslo_concurrency.lockutils [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1135.306379] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] c32818dc-b416-44c0-bcac-85b318a9cb84/c32818dc-b416-44c0-bcac-85b318a9cb84.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1135.306665] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d11ed09d-59eb-4075-adb9-ec74cfec6118 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.313240] env[63345]: DEBUG oslo_vmware.api [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Waiting for the task: (returnval){ [ 1135.313240] env[63345]: value = "task-1017936" [ 1135.313240] env[63345]: _type = "Task" [ 1135.313240] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.321104] env[63345]: DEBUG oslo_vmware.api [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Task: {'id': task-1017936, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.346192] env[63345]: DEBUG oslo_vmware.api [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017935, 'name': PowerOnVM_Task} progress is 78%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.592794] env[63345]: DEBUG nova.network.neutron [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1135.750383] env[63345]: DEBUG nova.network.neutron [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Updating instance_info_cache with network_info: [{"id": "c20cba8e-091b-4afd-9e5e-4d87441d4aea", "address": "fa:16:3e:42:11:be", "network": {"id": "13df4553-212e-4adb-8de0-da1acdf99671", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-238696814-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4560e378b6aa47a3bbb5a2f7c5b76f5f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "90328c7b-15c4-4742-805b-755248d67029", "external-id": "nsx-vlan-transportzone-860", "segmentation_id": 860, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc20cba8e-09", "ovs_interfaceid": "c20cba8e-091b-4afd-9e5e-4d87441d4aea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1135.824559] env[63345]: DEBUG oslo_vmware.api [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Task: {'id': task-1017936, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.849094] env[63345]: DEBUG oslo_vmware.api [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017935, 'name': PowerOnVM_Task, 'duration_secs': 0.642793} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1135.849448] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1135.850357] env[63345]: INFO nova.compute.manager [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Took 8.22 seconds to spawn the instance on the hypervisor. [ 1135.850691] env[63345]: DEBUG nova.compute.manager [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1135.851568] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9861dc8-721b-499e-889f-e62bd068ee81 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.253441] env[63345]: DEBUG oslo_concurrency.lockutils [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Releasing lock "refresh_cache-6b6ce545-0eca-4ef2-a859-c1e8ef978150" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1136.253785] env[63345]: DEBUG nova.compute.manager [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Instance network_info: |[{"id": "c20cba8e-091b-4afd-9e5e-4d87441d4aea", "address": "fa:16:3e:42:11:be", "network": {"id": "13df4553-212e-4adb-8de0-da1acdf99671", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-238696814-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4560e378b6aa47a3bbb5a2f7c5b76f5f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "90328c7b-15c4-4742-805b-755248d67029", "external-id": "nsx-vlan-transportzone-860", "segmentation_id": 860, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc20cba8e-09", "ovs_interfaceid": "c20cba8e-091b-4afd-9e5e-4d87441d4aea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 1136.254285] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 
6b6ce545-0eca-4ef2-a859-c1e8ef978150] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:42:11:be', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '90328c7b-15c4-4742-805b-755248d67029', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c20cba8e-091b-4afd-9e5e-4d87441d4aea', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1136.261654] env[63345]: DEBUG oslo.service.loopingcall [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1136.261870] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1136.262142] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-49fad0b2-5565-4ff9-8c48-329a73660902 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.281460] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1136.281460] env[63345]: value = "task-1017937" [ 1136.281460] env[63345]: _type = "Task" [ 1136.281460] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.288590] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017937, 'name': CreateVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.322290] env[63345]: DEBUG oslo_vmware.api [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Task: {'id': task-1017936, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.673885} completed successfully. 
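The "Instance VIF info [...]" entry above is the translation vmops performs from Neutron's network_info (the JSON dumped a few entries earlier) into the small per-NIC dict the VMware driver uses when building the VM. A rough sketch of that mapping, using only fields visible in this log; the helper name and exact field selection are illustrative, not the driver's code:

    def vif_info_from_network_info(vif):
        # `vif` is one entry of the network_info list logged above.
        details = vif.get('details', {})
        return {
            'network_name': vif['network']['bridge'],   # e.g. 'br-int'
            'mac_address': vif['address'],              # e.g. 'fa:16:3e:42:11:be'
            'network_ref': {
                'type': 'OpaqueNetwork',
                'network-id': details.get('nsx-logical-switch-id'),
                'network-type': 'nsx.LogicalSwitch',
                'use-external-id': True,
            },
            'iface_id': vif['id'],                      # Neutron port id
            'vif_model': 'vmxnet3',
        }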
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.322573] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] c32818dc-b416-44c0-bcac-85b318a9cb84/c32818dc-b416-44c0-bcac-85b318a9cb84.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1136.322829] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] [instance: c32818dc-b416-44c0-bcac-85b318a9cb84] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1136.323089] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-65e8bf87-cd39-4ab6-b3cc-7dc7d537ed43 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.329509] env[63345]: DEBUG oslo_vmware.api [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Waiting for the task: (returnval){ [ 1136.329509] env[63345]: value = "task-1017938" [ 1136.329509] env[63345]: _type = "Task" [ 1136.329509] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.336932] env[63345]: DEBUG oslo_vmware.api [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Task: {'id': task-1017938, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.372908] env[63345]: INFO nova.compute.manager [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Took 14.65 seconds to build instance. [ 1136.791385] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1017937, 'name': CreateVM_Task, 'duration_secs': 0.380647} completed successfully. 
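The CopyVirtualDisk_Task followed by ExtendVirtualDisk_Task entries are the usual spawn path: the cached image VMDK is copied from devstack-image-cache_base into the instance's directory, then the copy is grown to the flavor's root size (1048576 KB here). A rough sketch of that flow through oslo.vmware; the datacenter ref, paths and size are placeholders, and the keyword names follow the vSphere VirtualDiskManager API rather than Nova's wrappers:

    def copy_and_extend(session, dc_ref, src_vmdk, dst_vmdk, new_size_kb):
        disk_mgr = session.vim.service_content.virtualDiskManager
        # Copy the cached image VMDK to the instance's own VMDK.
        task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task',
                                  disk_mgr,
                                  sourceName=src_vmdk,
                                  sourceDatacenter=dc_ref,
                                  destName=dst_vmdk,
                                  destDatacenter=dc_ref)
        session.wait_for_task(task)
        # Grow the copy to the requested root disk size.
        task = session.invoke_api(session.vim, 'ExtendVirtualDisk_Task',
                                  disk_mgr,
                                  name=dst_vmdk,
                                  datacenter=dc_ref,
                                  newCapacityKb=new_size_kb,
                                  eagerZero=False)
        session.wait_for_task(task)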
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.791560] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1136.792264] env[63345]: DEBUG oslo_concurrency.lockutils [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1136.792454] env[63345]: DEBUG oslo_concurrency.lockutils [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1136.792782] env[63345]: DEBUG oslo_concurrency.lockutils [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1136.793048] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1f21914c-eac6-44d8-acfc-03c2d203c5dd {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.797204] env[63345]: DEBUG oslo_vmware.api [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Waiting for the task: (returnval){ [ 1136.797204] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]5266f352-c9c5-5553-8817-c6714c2346e1" [ 1136.797204] env[63345]: _type = "Task" [ 1136.797204] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.804381] env[63345]: DEBUG oslo_vmware.api [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5266f352-c9c5-5553-8817-c6714c2346e1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.838331] env[63345]: DEBUG oslo_vmware.api [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Task: {'id': task-1017938, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067311} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.838601] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] [instance: c32818dc-b416-44c0-bcac-85b318a9cb84] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1136.839376] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7eef032f-bc0d-4382-aaa2-dea74bbd4499 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.860780] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] [instance: c32818dc-b416-44c0-bcac-85b318a9cb84] Reconfiguring VM instance instance-00000074 to attach disk [datastore2] c32818dc-b416-44c0-bcac-85b318a9cb84/c32818dc-b416-44c0-bcac-85b318a9cb84.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1136.861084] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ad5a9299-8c59-47be-ade1-c8f3f42107e4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.875196] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d52f3731-137e-48b6-b2fd-fafd96b7720f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Lock "937ba0d6-bf23-45ae-8d75-cd7559e436f5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.162s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1136.880730] env[63345]: DEBUG oslo_vmware.api [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Waiting for the task: (returnval){ [ 1136.880730] env[63345]: value = "task-1017939" [ 1136.880730] env[63345]: _type = "Task" [ 1136.880730] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.888424] env[63345]: DEBUG oslo_vmware.api [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Task: {'id': task-1017939, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.308986] env[63345]: DEBUG oslo_vmware.api [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5266f352-c9c5-5553-8817-c6714c2346e1, 'name': SearchDatastore_Task, 'duration_secs': 0.039865} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.309329] env[63345]: DEBUG oslo_concurrency.lockutils [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1137.309575] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1137.309811] env[63345]: DEBUG oslo_concurrency.lockutils [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1137.309964] env[63345]: DEBUG oslo_concurrency.lockutils [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1137.310164] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1137.310438] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f759a668-bdf9-4d82-8abc-4cff533fe53f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.326443] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1137.326636] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1137.327374] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e3ba8460-c117-4bed-8a73-48886a986122 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.332359] env[63345]: DEBUG oslo_vmware.api [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Waiting for the task: (returnval){ [ 1137.332359] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52b26c40-09b7-530b-0d20-22d6f67ad1eb" [ 1137.332359] env[63345]: _type = "Task" [ 1137.332359] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1137.340873] env[63345]: DEBUG oslo_vmware.api [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52b26c40-09b7-530b-0d20-22d6f67ad1eb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.389868] env[63345]: DEBUG oslo_vmware.api [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Task: {'id': task-1017939, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.800903] env[63345]: DEBUG nova.compute.manager [req-b378ad8f-ea46-412b-86ad-ebcaa4c64965 req-4caf8244-5379-4184-a410-4b8f2f83f52d service nova] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Received event network-changed-4244898c-6ed5-4ae5-9bdb-12a31a9d8a9b {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1137.801258] env[63345]: DEBUG nova.compute.manager [req-b378ad8f-ea46-412b-86ad-ebcaa4c64965 req-4caf8244-5379-4184-a410-4b8f2f83f52d service nova] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Refreshing instance network info cache due to event network-changed-4244898c-6ed5-4ae5-9bdb-12a31a9d8a9b. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 1137.801561] env[63345]: DEBUG oslo_concurrency.lockutils [req-b378ad8f-ea46-412b-86ad-ebcaa4c64965 req-4caf8244-5379-4184-a410-4b8f2f83f52d service nova] Acquiring lock "refresh_cache-937ba0d6-bf23-45ae-8d75-cd7559e436f5" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1137.801775] env[63345]: DEBUG oslo_concurrency.lockutils [req-b378ad8f-ea46-412b-86ad-ebcaa4c64965 req-4caf8244-5379-4184-a410-4b8f2f83f52d service nova] Acquired lock "refresh_cache-937ba0d6-bf23-45ae-8d75-cd7559e436f5" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1137.802039] env[63345]: DEBUG nova.network.neutron [req-b378ad8f-ea46-412b-86ad-ebcaa4c64965 req-4caf8244-5379-4184-a410-4b8f2f83f52d service nova] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Refreshing network info cache for port 4244898c-6ed5-4ae5-9bdb-12a31a9d8a9b {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1137.843203] env[63345]: DEBUG oslo_vmware.api [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52b26c40-09b7-530b-0d20-22d6f67ad1eb, 'name': SearchDatastore_Task, 'duration_secs': 0.047611} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.844184] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d824d5c5-c91b-45dd-9d67-f8907a73c1a7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.849802] env[63345]: DEBUG oslo_vmware.api [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Waiting for the task: (returnval){ [ 1137.849802] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]5227b086-46db-f9af-36a5-9d2bce349cfa" [ 1137.849802] env[63345]: _type = "Task" [ 1137.849802] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1137.857960] env[63345]: DEBUG oslo_vmware.api [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5227b086-46db-f9af-36a5-9d2bce349cfa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.890227] env[63345]: DEBUG oslo_vmware.api [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Task: {'id': task-1017939, 'name': ReconfigVM_Task, 'duration_secs': 0.93905} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.890601] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] [instance: c32818dc-b416-44c0-bcac-85b318a9cb84] Reconfigured VM instance instance-00000074 to attach disk [datastore2] c32818dc-b416-44c0-bcac-85b318a9cb84/c32818dc-b416-44c0-bcac-85b318a9cb84.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1137.891253] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e975e532-3219-4335-8e14-41e908a050c7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.897383] env[63345]: DEBUG oslo_vmware.api [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Waiting for the task: (returnval){ [ 1137.897383] env[63345]: value = "task-1017940" [ 1137.897383] env[63345]: _type = "Task" [ 1137.897383] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1137.904755] env[63345]: DEBUG oslo_vmware.api [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Task: {'id': task-1017940, 'name': Rename_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.359707] env[63345]: DEBUG oslo_vmware.api [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]5227b086-46db-f9af-36a5-9d2bce349cfa, 'name': SearchDatastore_Task, 'duration_secs': 0.008148} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1138.360352] env[63345]: DEBUG oslo_concurrency.lockutils [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1138.360352] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 6b6ce545-0eca-4ef2-a859-c1e8ef978150/6b6ce545-0eca-4ef2-a859-c1e8ef978150.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1138.360526] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0993ae75-7c67-49fb-b382-a8bfd89a6033 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.367708] env[63345]: DEBUG oslo_vmware.api [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Waiting for the task: (returnval){ [ 1138.367708] env[63345]: value = "task-1017941" [ 1138.367708] env[63345]: _type = "Task" [ 1138.367708] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.377992] env[63345]: DEBUG oslo_vmware.api [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': task-1017941, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.407438] env[63345]: DEBUG oslo_vmware.api [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Task: {'id': task-1017940, 'name': Rename_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.519011] env[63345]: DEBUG nova.network.neutron [req-b378ad8f-ea46-412b-86ad-ebcaa4c64965 req-4caf8244-5379-4184-a410-4b8f2f83f52d service nova] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Updated VIF entry in instance network info cache for port 4244898c-6ed5-4ae5-9bdb-12a31a9d8a9b. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1138.519423] env[63345]: DEBUG nova.network.neutron [req-b378ad8f-ea46-412b-86ad-ebcaa4c64965 req-4caf8244-5379-4184-a410-4b8f2f83f52d service nova] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Updating instance_info_cache with network_info: [{"id": "4244898c-6ed5-4ae5-9bdb-12a31a9d8a9b", "address": "fa:16:3e:0e:aa:f1", "network": {"id": "04c13a40-3e24-45e3-b045-adb1f5b0ad03", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1754460710-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.175", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a7aaf150ea243b6a38a4b14f265bd4d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3753f451-fa23-4988-9361-074fb0bd3fd4", "external-id": "nsx-vlan-transportzone-440", "segmentation_id": 440, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4244898c-6e", "ovs_interfaceid": "4244898c-6ed5-4ae5-9bdb-12a31a9d8a9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1138.877824] env[63345]: DEBUG oslo_vmware.api [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': task-1017941, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.908972] env[63345]: DEBUG oslo_vmware.api [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Task: {'id': task-1017940, 'name': Rename_Task, 'duration_secs': 0.988179} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1138.909333] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] [instance: c32818dc-b416-44c0-bcac-85b318a9cb84] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1138.909668] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-429e7bed-9a3c-4bf3-8239-ad702fb25b8c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.915953] env[63345]: DEBUG oslo_vmware.api [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Waiting for the task: (returnval){ [ 1138.915953] env[63345]: value = "task-1017942" [ 1138.915953] env[63345]: _type = "Task" [ 1138.915953] env[63345]: } to complete. 
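The refreshed cache entry above shows the shape Nova stores per VIF: subnets containing fixed IPs, each of which may carry attached floating IPs (here 192.168.128.9 with floating 10.180.180.175). A small helper that walks exactly that structure, purely for illustration:

    def list_addresses(vif):
        # `vif` is one network_info entry as logged above.
        fixed, floating = [], []
        for subnet in vif['network']['subnets']:
            for ip in subnet['ips']:
                fixed.append(ip['address'])
                floating.extend(f['address'] for f in ip.get('floating_ips', []))
        return fixed, floating

    # For the cache entry above this yields
    # (['192.168.128.9'], ['10.180.180.175']).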
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.924273] env[63345]: DEBUG oslo_vmware.api [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Task: {'id': task-1017942, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.022506] env[63345]: DEBUG oslo_concurrency.lockutils [req-b378ad8f-ea46-412b-86ad-ebcaa4c64965 req-4caf8244-5379-4184-a410-4b8f2f83f52d service nova] Releasing lock "refresh_cache-937ba0d6-bf23-45ae-8d75-cd7559e436f5" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1139.377886] env[63345]: DEBUG oslo_vmware.api [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': task-1017941, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.840574} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1139.378164] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 6b6ce545-0eca-4ef2-a859-c1e8ef978150/6b6ce545-0eca-4ef2-a859-c1e8ef978150.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1139.378386] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1139.378632] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-45b7250d-15eb-4883-83c6-3ae6623d61ca {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.386210] env[63345]: DEBUG oslo_vmware.api [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Waiting for the task: (returnval){ [ 1139.386210] env[63345]: value = "task-1017943" [ 1139.386210] env[63345]: _type = "Task" [ 1139.386210] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.393969] env[63345]: DEBUG oslo_vmware.api [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': task-1017943, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.426883] env[63345]: DEBUG oslo_vmware.api [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Task: {'id': task-1017942, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.895793] env[63345]: DEBUG oslo_vmware.api [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': task-1017943, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.060873} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1139.896082] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1139.896843] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdc76858-9e48-41fd-ae07-1aea29ee9e5b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.918089] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Reconfiguring VM instance instance-00000075 to attach disk [datastore2] 6b6ce545-0eca-4ef2-a859-c1e8ef978150/6b6ce545-0eca-4ef2-a859-c1e8ef978150.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1139.918416] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6c7eaea6-038b-4449-8fa9-937da9c1dfa7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.939606] env[63345]: DEBUG oslo_vmware.api [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Task: {'id': task-1017942, 'name': PowerOnVM_Task, 'duration_secs': 0.66946} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1139.940763] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] [instance: c32818dc-b416-44c0-bcac-85b318a9cb84] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1139.940980] env[63345]: INFO nova.compute.manager [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] [instance: c32818dc-b416-44c0-bcac-85b318a9cb84] Took 9.17 seconds to spawn the instance on the hypervisor. 
[ 1139.941183] env[63345]: DEBUG nova.compute.manager [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] [instance: c32818dc-b416-44c0-bcac-85b318a9cb84] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1139.941496] env[63345]: DEBUG oslo_vmware.api [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Waiting for the task: (returnval){ [ 1139.941496] env[63345]: value = "task-1017944" [ 1139.941496] env[63345]: _type = "Task" [ 1139.941496] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.942161] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d5a5b20-47c4-4ad2-b130-ceb7adbb23d8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.955569] env[63345]: DEBUG oslo_vmware.api [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': task-1017944, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.454460] env[63345]: DEBUG oslo_vmware.api [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': task-1017944, 'name': ReconfigVM_Task, 'duration_secs': 0.311915} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.454774] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Reconfigured VM instance instance-00000075 to attach disk [datastore2] 6b6ce545-0eca-4ef2-a859-c1e8ef978150/6b6ce545-0eca-4ef2-a859-c1e8ef978150.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1140.455451] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-671d30a3-3828-4485-97c4-b1baf8963e07 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.462769] env[63345]: INFO nova.compute.manager [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] [instance: c32818dc-b416-44c0-bcac-85b318a9cb84] Took 13.94 seconds to build instance. [ 1140.464804] env[63345]: DEBUG oslo_vmware.api [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Waiting for the task: (returnval){ [ 1140.464804] env[63345]: value = "task-1017945" [ 1140.464804] env[63345]: _type = "Task" [ 1140.464804] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.472712] env[63345]: DEBUG oslo_vmware.api [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': task-1017945, 'name': Rename_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.965971] env[63345]: DEBUG oslo_concurrency.lockutils [None req-af30f94c-7355-46d6-aa69-e8b79fed4e64 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Lock "c32818dc-b416-44c0-bcac-85b318a9cb84" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.446s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1140.975268] env[63345]: DEBUG oslo_vmware.api [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': task-1017945, 'name': Rename_Task, 'duration_secs': 0.148379} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.975569] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1140.975800] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ce718fc3-93ff-4c9b-89af-d175370fc49b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.983026] env[63345]: DEBUG oslo_vmware.api [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Waiting for the task: (returnval){ [ 1140.983026] env[63345]: value = "task-1017946" [ 1140.983026] env[63345]: _type = "Task" [ 1140.983026] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.991924] env[63345]: DEBUG oslo_vmware.api [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': task-1017946, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.006555] env[63345]: DEBUG oslo_concurrency.lockutils [None req-15f2a12c-b21c-4855-b46c-ed2db38d29c6 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Acquiring lock "c32818dc-b416-44c0-bcac-85b318a9cb84" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1141.006881] env[63345]: DEBUG oslo_concurrency.lockutils [None req-15f2a12c-b21c-4855-b46c-ed2db38d29c6 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Lock "c32818dc-b416-44c0-bcac-85b318a9cb84" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1141.007172] env[63345]: DEBUG oslo_concurrency.lockutils [None req-15f2a12c-b21c-4855-b46c-ed2db38d29c6 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Acquiring lock "c32818dc-b416-44c0-bcac-85b318a9cb84-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1141.007427] env[63345]: DEBUG oslo_concurrency.lockutils [None req-15f2a12c-b21c-4855-b46c-ed2db38d29c6 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Lock "c32818dc-b416-44c0-bcac-85b318a9cb84-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1141.007667] env[63345]: DEBUG oslo_concurrency.lockutils [None req-15f2a12c-b21c-4855-b46c-ed2db38d29c6 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Lock "c32818dc-b416-44c0-bcac-85b318a9cb84-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1141.009827] env[63345]: INFO nova.compute.manager [None req-15f2a12c-b21c-4855-b46c-ed2db38d29c6 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] [instance: c32818dc-b416-44c0-bcac-85b318a9cb84] Terminating instance [ 1141.493334] env[63345]: DEBUG oslo_vmware.api [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': task-1017946, 'name': PowerOnVM_Task, 'duration_secs': 0.421801} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1141.493563] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1141.493911] env[63345]: INFO nova.compute.manager [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Took 8.51 seconds to spawn the instance on the hypervisor. [ 1141.494193] env[63345]: DEBUG nova.compute.manager [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1141.495078] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a0f5e70-a054-4b23-a0c3-9715e30f4a64 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.514351] env[63345]: DEBUG nova.compute.manager [None req-15f2a12c-b21c-4855-b46c-ed2db38d29c6 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] [instance: c32818dc-b416-44c0-bcac-85b318a9cb84] Start destroying the instance on the hypervisor. {{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 1141.514584] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-15f2a12c-b21c-4855-b46c-ed2db38d29c6 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] [instance: c32818dc-b416-44c0-bcac-85b318a9cb84] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1141.515487] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae138db8-7c8d-486e-a526-595fc36d0f34 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.523352] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-15f2a12c-b21c-4855-b46c-ed2db38d29c6 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] [instance: c32818dc-b416-44c0-bcac-85b318a9cb84] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1141.523592] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-701554ca-9cda-44de-8324-56cf6b514bf9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.529219] env[63345]: DEBUG oslo_vmware.api [None req-15f2a12c-b21c-4855-b46c-ed2db38d29c6 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Waiting for the task: (returnval){ [ 1141.529219] env[63345]: value = "task-1017947" [ 1141.529219] env[63345]: _type = "Task" [ 1141.529219] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1141.538077] env[63345]: DEBUG oslo_vmware.api [None req-15f2a12c-b21c-4855-b46c-ed2db38d29c6 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Task: {'id': task-1017947, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.011690] env[63345]: INFO nova.compute.manager [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Took 15.09 seconds to build instance. [ 1142.039368] env[63345]: DEBUG oslo_vmware.api [None req-15f2a12c-b21c-4855-b46c-ed2db38d29c6 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Task: {'id': task-1017947, 'name': PowerOffVM_Task, 'duration_secs': 0.202263} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1142.039729] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-15f2a12c-b21c-4855-b46c-ed2db38d29c6 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] [instance: c32818dc-b416-44c0-bcac-85b318a9cb84] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1142.040040] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-15f2a12c-b21c-4855-b46c-ed2db38d29c6 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] [instance: c32818dc-b416-44c0-bcac-85b318a9cb84] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1142.040404] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cbb3d24b-fee0-49c9-8224-1e3e61000097 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.104841] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-15f2a12c-b21c-4855-b46c-ed2db38d29c6 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] [instance: c32818dc-b416-44c0-bcac-85b318a9cb84] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1142.105096] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-15f2a12c-b21c-4855-b46c-ed2db38d29c6 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] [instance: c32818dc-b416-44c0-bcac-85b318a9cb84] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1142.105318] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-15f2a12c-b21c-4855-b46c-ed2db38d29c6 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Deleting the datastore file [datastore2] c32818dc-b416-44c0-bcac-85b318a9cb84 {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1142.105577] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b2627142-9565-46d5-93cb-d1fd794d3b31 {{(pid=63345) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.111927] env[63345]: DEBUG oslo_vmware.api [None req-15f2a12c-b21c-4855-b46c-ed2db38d29c6 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Waiting for the task: (returnval){ [ 1142.111927] env[63345]: value = "task-1017949" [ 1142.111927] env[63345]: _type = "Task" [ 1142.111927] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.119554] env[63345]: DEBUG oslo_vmware.api [None req-15f2a12c-b21c-4855-b46c-ed2db38d29c6 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Task: {'id': task-1017949, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.348023] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4c21fc74-8be6-48e1-98b9-3fd9ebb68f74 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Acquiring lock "5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1142.348023] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4c21fc74-8be6-48e1-98b9-3fd9ebb68f74 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lock "5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1142.373549] env[63345]: DEBUG nova.compute.manager [req-93f429bd-88d1-461d-80d3-c1d9a68cd04c req-4cf9184c-5605-4b12-b708-7793e89f0fb2 service nova] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Received event network-changed-c20cba8e-091b-4afd-9e5e-4d87441d4aea {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1142.373960] env[63345]: DEBUG nova.compute.manager [req-93f429bd-88d1-461d-80d3-c1d9a68cd04c req-4cf9184c-5605-4b12-b708-7793e89f0fb2 service nova] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Refreshing instance network info cache due to event network-changed-c20cba8e-091b-4afd-9e5e-4d87441d4aea. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 1142.373960] env[63345]: DEBUG oslo_concurrency.lockutils [req-93f429bd-88d1-461d-80d3-c1d9a68cd04c req-4cf9184c-5605-4b12-b708-7793e89f0fb2 service nova] Acquiring lock "refresh_cache-6b6ce545-0eca-4ef2-a859-c1e8ef978150" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1142.374200] env[63345]: DEBUG oslo_concurrency.lockutils [req-93f429bd-88d1-461d-80d3-c1d9a68cd04c req-4cf9184c-5605-4b12-b708-7793e89f0fb2 service nova] Acquired lock "refresh_cache-6b6ce545-0eca-4ef2-a859-c1e8ef978150" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1142.374474] env[63345]: DEBUG nova.network.neutron [req-93f429bd-88d1-461d-80d3-c1d9a68cd04c req-4cf9184c-5605-4b12-b708-7793e89f0fb2 service nova] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Refreshing network info cache for port c20cba8e-091b-4afd-9e5e-4d87441d4aea {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1142.513367] env[63345]: DEBUG oslo_concurrency.lockutils [None req-95eb49d3-c94a-453a-b10f-27c942571551 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Lock "6b6ce545-0eca-4ef2-a859-c1e8ef978150" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.600s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1142.622044] env[63345]: DEBUG oslo_vmware.api [None req-15f2a12c-b21c-4855-b46c-ed2db38d29c6 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Task: {'id': task-1017949, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.212457} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1142.622392] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-15f2a12c-b21c-4855-b46c-ed2db38d29c6 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1142.622638] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-15f2a12c-b21c-4855-b46c-ed2db38d29c6 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] [instance: c32818dc-b416-44c0-bcac-85b318a9cb84] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1142.622753] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-15f2a12c-b21c-4855-b46c-ed2db38d29c6 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] [instance: c32818dc-b416-44c0-bcac-85b318a9cb84] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1142.622934] env[63345]: INFO nova.compute.manager [None req-15f2a12c-b21c-4855-b46c-ed2db38d29c6 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] [instance: c32818dc-b416-44c0-bcac-85b318a9cb84] Took 1.11 seconds to destroy the instance on the hypervisor. 
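(Editor's aside.) The power-off / unregister / delete-datastore-file sequence recorded above is driven through oslo.vmware's asynchronous task pattern: the driver invokes a vSphere *_Task method and then polls the returned task until it completes, which is what the repeated "_poll_task ... progress is N%" entries show. Below is a minimal sketch of that pattern, assuming only the public oslo.vmware API (VMwareAPISession, invoke_api, wait_for_task, vim_util.get_moref); the vCenter host, credentials and VM managed-object ID are placeholders for illustration, not values taken from this log.

    # Illustrative sketch of the PowerOffVM_Task / wait_for_task pattern seen in the
    # log above. Host, credentials and the VM managed-object ID are placeholders.
    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    session = vmware_api.VMwareAPISession(
        'vcenter.example.org',          # placeholder vCenter host
        'administrator@vsphere.local',  # placeholder username
        'secret',                       # placeholder password
        api_retry_count=10,
        task_poll_interval=0.5)

    # Build a VirtualMachine managed-object reference (placeholder moref value).
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

    # Start the asynchronous vSphere task, then block until it finishes;
    # wait_for_task polls the task status (the "_poll_task ... progress" lines)
    # and raises if the task ends in an error state.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)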
[ 1142.623201] env[63345]: DEBUG oslo.service.loopingcall [None req-15f2a12c-b21c-4855-b46c-ed2db38d29c6 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1142.623429] env[63345]: DEBUG nova.compute.manager [-] [instance: c32818dc-b416-44c0-bcac-85b318a9cb84] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 1142.623513] env[63345]: DEBUG nova.network.neutron [-] [instance: c32818dc-b416-44c0-bcac-85b318a9cb84] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1142.850725] env[63345]: DEBUG nova.compute.utils [None req-4c21fc74-8be6-48e1-98b9-3fd9ebb68f74 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1142.896034] env[63345]: DEBUG nova.compute.manager [req-66c1bafe-eac8-4014-bf56-db8069dafdc6 req-11c3ade6-62c2-4efd-bb72-47eca6dc7f35 service nova] [instance: c32818dc-b416-44c0-bcac-85b318a9cb84] Received event network-vif-deleted-59a71e22-f87b-421e-a3e0-1c3fa40e695e {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1142.896257] env[63345]: INFO nova.compute.manager [req-66c1bafe-eac8-4014-bf56-db8069dafdc6 req-11c3ade6-62c2-4efd-bb72-47eca6dc7f35 service nova] [instance: c32818dc-b416-44c0-bcac-85b318a9cb84] Neutron deleted interface 59a71e22-f87b-421e-a3e0-1c3fa40e695e; detaching it from the instance and deleting it from the info cache [ 1142.896447] env[63345]: DEBUG nova.network.neutron [req-66c1bafe-eac8-4014-bf56-db8069dafdc6 req-11c3ade6-62c2-4efd-bb72-47eca6dc7f35 service nova] [instance: c32818dc-b416-44c0-bcac-85b318a9cb84] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1143.089420] env[63345]: DEBUG nova.network.neutron [req-93f429bd-88d1-461d-80d3-c1d9a68cd04c req-4cf9184c-5605-4b12-b708-7793e89f0fb2 service nova] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Updated VIF entry in instance network info cache for port c20cba8e-091b-4afd-9e5e-4d87441d4aea. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1143.089810] env[63345]: DEBUG nova.network.neutron [req-93f429bd-88d1-461d-80d3-c1d9a68cd04c req-4cf9184c-5605-4b12-b708-7793e89f0fb2 service nova] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Updating instance_info_cache with network_info: [{"id": "c20cba8e-091b-4afd-9e5e-4d87441d4aea", "address": "fa:16:3e:42:11:be", "network": {"id": "13df4553-212e-4adb-8de0-da1acdf99671", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-238696814-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.179", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4560e378b6aa47a3bbb5a2f7c5b76f5f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "90328c7b-15c4-4742-805b-755248d67029", "external-id": "nsx-vlan-transportzone-860", "segmentation_id": 860, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc20cba8e-09", "ovs_interfaceid": "c20cba8e-091b-4afd-9e5e-4d87441d4aea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1143.353081] env[63345]: DEBUG nova.network.neutron [-] [instance: c32818dc-b416-44c0-bcac-85b318a9cb84] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1143.354318] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4c21fc74-8be6-48e1-98b9-3fd9ebb68f74 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lock "5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1143.400246] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-60a10af0-81a0-4663-bdfb-89e2dff1cc3e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.409762] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f4be970-c82c-4055-a4db-b84944a1ac90 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.438198] env[63345]: DEBUG nova.compute.manager [req-66c1bafe-eac8-4014-bf56-db8069dafdc6 req-11c3ade6-62c2-4efd-bb72-47eca6dc7f35 service nova] [instance: c32818dc-b416-44c0-bcac-85b318a9cb84] Detach interface failed, port_id=59a71e22-f87b-421e-a3e0-1c3fa40e695e, reason: Instance c32818dc-b416-44c0-bcac-85b318a9cb84 could not be found. 
{{(pid=63345) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 1143.593894] env[63345]: DEBUG oslo_concurrency.lockutils [req-93f429bd-88d1-461d-80d3-c1d9a68cd04c req-4cf9184c-5605-4b12-b708-7793e89f0fb2 service nova] Releasing lock "refresh_cache-6b6ce545-0eca-4ef2-a859-c1e8ef978150" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1143.857175] env[63345]: INFO nova.compute.manager [-] [instance: c32818dc-b416-44c0-bcac-85b318a9cb84] Took 1.23 seconds to deallocate network for instance. [ 1144.366728] env[63345]: DEBUG oslo_concurrency.lockutils [None req-15f2a12c-b21c-4855-b46c-ed2db38d29c6 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1144.366728] env[63345]: DEBUG oslo_concurrency.lockutils [None req-15f2a12c-b21c-4855-b46c-ed2db38d29c6 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1144.366728] env[63345]: DEBUG nova.objects.instance [None req-15f2a12c-b21c-4855-b46c-ed2db38d29c6 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Lazy-loading 'resources' on Instance uuid c32818dc-b416-44c0-bcac-85b318a9cb84 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1144.418087] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4c21fc74-8be6-48e1-98b9-3fd9ebb68f74 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Acquiring lock "5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1144.418374] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4c21fc74-8be6-48e1-98b9-3fd9ebb68f74 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lock "5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1144.418627] env[63345]: INFO nova.compute.manager [None req-4c21fc74-8be6-48e1-98b9-3fd9ebb68f74 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Attaching volume d16e80cf-439a-4033-8575-b9e5efa65dd8 to /dev/sdb [ 1144.452710] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55eadf2b-62bb-4a08-8d59-f2323b0f7a29 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.460500] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-469ed76b-952a-49f6-8de2-5d02f72ffb60 {{(pid=63345) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.474417] env[63345]: DEBUG nova.virt.block_device [None req-4c21fc74-8be6-48e1-98b9-3fd9ebb68f74 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Updating existing volume attachment record: 50ece722-1bc0-459d-8ce4-90ab164fdf4f {{(pid=63345) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1144.972922] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5b63bdc-52a7-4a2e-a952-8dc6a2ce7e04 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.980170] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4fe0b0e-7c11-4ef4-9589-2d2759533292 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.013942] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa16b201-74da-4c5a-9d25-be9f411f973c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.021426] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5373d4fb-af88-414c-ba7e-990a49713f85 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.034312] env[63345]: DEBUG nova.compute.provider_tree [None req-15f2a12c-b21c-4855-b46c-ed2db38d29c6 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1145.537081] env[63345]: DEBUG nova.scheduler.client.report [None req-15f2a12c-b21c-4855-b46c-ed2db38d29c6 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1146.041529] env[63345]: DEBUG oslo_concurrency.lockutils [None req-15f2a12c-b21c-4855-b46c-ed2db38d29c6 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.675s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1146.059777] env[63345]: INFO nova.scheduler.client.report [None req-15f2a12c-b21c-4855-b46c-ed2db38d29c6 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Deleted allocations for instance c32818dc-b416-44c0-bcac-85b318a9cb84 [ 1146.567204] env[63345]: DEBUG 
oslo_concurrency.lockutils [None req-15f2a12c-b21c-4855-b46c-ed2db38d29c6 tempest-InstanceActionsNegativeTestJSON-1310339896 tempest-InstanceActionsNegativeTestJSON-1310339896-project-member] Lock "c32818dc-b416-44c0-bcac-85b318a9cb84" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.560s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1149.523692] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c21fc74-8be6-48e1-98b9-3fd9ebb68f74 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Volume attach. Driver type: vmdk {{(pid=63345) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1149.523957] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c21fc74-8be6-48e1-98b9-3fd9ebb68f74 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-226182', 'volume_id': 'd16e80cf-439a-4033-8575-b9e5efa65dd8', 'name': 'volume-d16e80cf-439a-4033-8575-b9e5efa65dd8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46', 'attached_at': '', 'detached_at': '', 'volume_id': 'd16e80cf-439a-4033-8575-b9e5efa65dd8', 'serial': 'd16e80cf-439a-4033-8575-b9e5efa65dd8'} {{(pid=63345) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1149.524975] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b698b059-15ed-47f7-bd96-bc95953778ea {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.541132] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb1b3b46-a558-47e1-89cb-7ad1f76dc592 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.253560] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c21fc74-8be6-48e1-98b9-3fd9ebb68f74 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Reconfiguring VM instance instance-00000071 to attach disk [datastore1] volume-d16e80cf-439a-4033-8575-b9e5efa65dd8/volume-d16e80cf-439a-4033-8575-b9e5efa65dd8.vmdk or device None with type thin {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1150.253560] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b76daab4-6b47-4067-8b1a-8b2b0add2f45 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.274065] env[63345]: DEBUG oslo_vmware.api [None req-4c21fc74-8be6-48e1-98b9-3fd9ebb68f74 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 1150.274065] env[63345]: value = "task-1017954" [ 1150.274065] env[63345]: _type = "Task" [ 1150.274065] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1150.281637] env[63345]: DEBUG oslo_vmware.api [None req-4c21fc74-8be6-48e1-98b9-3fd9ebb68f74 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017954, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.783403] env[63345]: DEBUG oslo_vmware.api [None req-4c21fc74-8be6-48e1-98b9-3fd9ebb68f74 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017954, 'name': ReconfigVM_Task, 'duration_secs': 0.40028} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1150.783711] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c21fc74-8be6-48e1-98b9-3fd9ebb68f74 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Reconfigured VM instance instance-00000071 to attach disk [datastore1] volume-d16e80cf-439a-4033-8575-b9e5efa65dd8/volume-d16e80cf-439a-4033-8575-b9e5efa65dd8.vmdk or device None with type thin {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1150.788297] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a8501882-a70c-4870-afb6-8886b4a2674a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.802918] env[63345]: DEBUG oslo_vmware.api [None req-4c21fc74-8be6-48e1-98b9-3fd9ebb68f74 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 1150.802918] env[63345]: value = "task-1017955" [ 1150.802918] env[63345]: _type = "Task" [ 1150.802918] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1150.810431] env[63345]: DEBUG oslo_vmware.api [None req-4c21fc74-8be6-48e1-98b9-3fd9ebb68f74 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017955, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.312422] env[63345]: DEBUG oslo_vmware.api [None req-4c21fc74-8be6-48e1-98b9-3fd9ebb68f74 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017955, 'name': ReconfigVM_Task, 'duration_secs': 0.161897} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1151.312658] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c21fc74-8be6-48e1-98b9-3fd9ebb68f74 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-226182', 'volume_id': 'd16e80cf-439a-4033-8575-b9e5efa65dd8', 'name': 'volume-d16e80cf-439a-4033-8575-b9e5efa65dd8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46', 'attached_at': '', 'detached_at': '', 'volume_id': 'd16e80cf-439a-4033-8575-b9e5efa65dd8', 'serial': 'd16e80cf-439a-4033-8575-b9e5efa65dd8'} {{(pid=63345) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1152.139390] env[63345]: DEBUG oslo_concurrency.lockutils [None req-67e1ace4-91a3-4f5f-ab5b-9793de10c92c tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Acquiring lock "95738bee-d291-4f27-aeff-9445939bb3fa" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1152.139739] env[63345]: DEBUG oslo_concurrency.lockutils [None req-67e1ace4-91a3-4f5f-ab5b-9793de10c92c tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Lock "95738bee-d291-4f27-aeff-9445939bb3fa" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1152.139830] env[63345]: DEBUG oslo_concurrency.lockutils [None req-67e1ace4-91a3-4f5f-ab5b-9793de10c92c tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Acquiring lock "95738bee-d291-4f27-aeff-9445939bb3fa-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1152.140030] env[63345]: DEBUG oslo_concurrency.lockutils [None req-67e1ace4-91a3-4f5f-ab5b-9793de10c92c tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Lock "95738bee-d291-4f27-aeff-9445939bb3fa-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1152.140219] env[63345]: DEBUG oslo_concurrency.lockutils [None req-67e1ace4-91a3-4f5f-ab5b-9793de10c92c tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Lock "95738bee-d291-4f27-aeff-9445939bb3fa-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1152.143883] env[63345]: INFO nova.compute.manager [None req-67e1ace4-91a3-4f5f-ab5b-9793de10c92c tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 
95738bee-d291-4f27-aeff-9445939bb3fa] Terminating instance [ 1152.349043] env[63345]: DEBUG nova.objects.instance [None req-4c21fc74-8be6-48e1-98b9-3fd9ebb68f74 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lazy-loading 'flavor' on Instance uuid 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1152.647810] env[63345]: DEBUG nova.compute.manager [None req-67e1ace4-91a3-4f5f-ab5b-9793de10c92c tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Start destroying the instance on the hypervisor. {{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 1152.648080] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-67e1ace4-91a3-4f5f-ab5b-9793de10c92c tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1152.648982] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f5b18d2-46a2-4b16-aecb-a613a1cfac40 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.656356] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-67e1ace4-91a3-4f5f-ab5b-9793de10c92c tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1152.656548] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5c11d213-3c2e-423b-baf4-6e853950fcf7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.661986] env[63345]: DEBUG oslo_vmware.api [None req-67e1ace4-91a3-4f5f-ab5b-9793de10c92c tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Waiting for the task: (returnval){ [ 1152.661986] env[63345]: value = "task-1017956" [ 1152.661986] env[63345]: _type = "Task" [ 1152.661986] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.670874] env[63345]: DEBUG oslo_vmware.api [None req-67e1ace4-91a3-4f5f-ab5b-9793de10c92c tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017956, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.853191] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4c21fc74-8be6-48e1-98b9-3fd9ebb68f74 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lock "5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.435s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1153.172111] env[63345]: DEBUG oslo_vmware.api [None req-67e1ace4-91a3-4f5f-ab5b-9793de10c92c tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017956, 'name': PowerOffVM_Task, 'duration_secs': 0.177621} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.172111] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-67e1ace4-91a3-4f5f-ab5b-9793de10c92c tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1153.172534] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-67e1ace4-91a3-4f5f-ab5b-9793de10c92c tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1153.172534] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4e44739a-93c2-47ec-af5a-01ec0ad13112 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.242326] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-67e1ace4-91a3-4f5f-ab5b-9793de10c92c tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1153.242539] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-67e1ace4-91a3-4f5f-ab5b-9793de10c92c tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1153.242740] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-67e1ace4-91a3-4f5f-ab5b-9793de10c92c tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Deleting the datastore file [datastore2] 95738bee-d291-4f27-aeff-9445939bb3fa {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1153.243011] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e9b365d7-739c-4ef2-b16a-ba9f1c4ba913 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.248814] env[63345]: DEBUG oslo_vmware.api [None req-67e1ace4-91a3-4f5f-ab5b-9793de10c92c tempest-AttachVolumeShelveTestJSON-2082682761 
tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Waiting for the task: (returnval){ [ 1153.248814] env[63345]: value = "task-1017958" [ 1153.248814] env[63345]: _type = "Task" [ 1153.248814] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.255943] env[63345]: DEBUG oslo_vmware.api [None req-67e1ace4-91a3-4f5f-ab5b-9793de10c92c tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017958, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.518045] env[63345]: DEBUG nova.compute.manager [None req-97cd02fb-90ec-4381-bb09-070baa7d52f7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Stashing vm_state: active {{(pid=63345) _prep_resize /opt/stack/nova/nova/compute/manager.py:5953}} [ 1153.758907] env[63345]: DEBUG oslo_vmware.api [None req-67e1ace4-91a3-4f5f-ab5b-9793de10c92c tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Task: {'id': task-1017958, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.133094} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.759180] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-67e1ace4-91a3-4f5f-ab5b-9793de10c92c tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1153.759374] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-67e1ace4-91a3-4f5f-ab5b-9793de10c92c tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1153.759553] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-67e1ace4-91a3-4f5f-ab5b-9793de10c92c tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1153.759727] env[63345]: INFO nova.compute.manager [None req-67e1ace4-91a3-4f5f-ab5b-9793de10c92c tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1153.759961] env[63345]: DEBUG oslo.service.loopingcall [None req-67e1ace4-91a3-4f5f-ab5b-9793de10c92c tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1153.760175] env[63345]: DEBUG nova.compute.manager [-] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 1153.760268] env[63345]: DEBUG nova.network.neutron [-] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1154.041685] env[63345]: DEBUG oslo_concurrency.lockutils [None req-97cd02fb-90ec-4381-bb09-070baa7d52f7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1154.041964] env[63345]: DEBUG oslo_concurrency.lockutils [None req-97cd02fb-90ec-4381-bb09-070baa7d52f7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1154.200851] env[63345]: DEBUG nova.compute.manager [req-d59666fa-85f9-4a77-89b6-0150dd4c7437 req-7dc64d83-d279-4d72-b01b-a636c519231b service nova] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Received event network-vif-deleted-f9b10cca-c2c3-45d2-a329-61efee5dde7f {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1154.201116] env[63345]: INFO nova.compute.manager [req-d59666fa-85f9-4a77-89b6-0150dd4c7437 req-7dc64d83-d279-4d72-b01b-a636c519231b service nova] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Neutron deleted interface f9b10cca-c2c3-45d2-a329-61efee5dde7f; detaching it from the instance and deleting it from the info cache [ 1154.201269] env[63345]: DEBUG nova.network.neutron [req-d59666fa-85f9-4a77-89b6-0150dd4c7437 req-7dc64d83-d279-4d72-b01b-a636c519231b service nova] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1154.547345] env[63345]: INFO nova.compute.claims [None req-97cd02fb-90ec-4381-bb09-070baa7d52f7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1154.679310] env[63345]: DEBUG nova.network.neutron [-] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1154.704203] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-aa8054d8-4bc1-4a48-be22-c29c667f4683 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.713174] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c135f7ff-2f5d-4fea-ae50-ae7f9fb08d19 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.741056] 
env[63345]: DEBUG nova.compute.manager [req-d59666fa-85f9-4a77-89b6-0150dd4c7437 req-7dc64d83-d279-4d72-b01b-a636c519231b service nova] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Detach interface failed, port_id=f9b10cca-c2c3-45d2-a329-61efee5dde7f, reason: Instance 95738bee-d291-4f27-aeff-9445939bb3fa could not be found. {{(pid=63345) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 1155.054717] env[63345]: INFO nova.compute.resource_tracker [None req-97cd02fb-90ec-4381-bb09-070baa7d52f7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Updating resource usage from migration 8378a98b-90aa-43e1-a3c1-112f8846a508 [ 1155.158507] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40b14b90-00b9-4e96-9ea2-1f8933521505 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.166509] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43437b09-ff96-4f3c-a3d5-f0ede98d15dc {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.198558] env[63345]: INFO nova.compute.manager [-] [instance: 95738bee-d291-4f27-aeff-9445939bb3fa] Took 1.44 seconds to deallocate network for instance. [ 1155.201191] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba147134-360d-45eb-a17c-4ffc300844ad {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.211956] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a73dbbfe-a0c8-4aee-8f38-d4c2a6bb12b4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.226413] env[63345]: DEBUG nova.compute.provider_tree [None req-97cd02fb-90ec-4381-bb09-070baa7d52f7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1155.708533] env[63345]: DEBUG oslo_concurrency.lockutils [None req-67e1ace4-91a3-4f5f-ab5b-9793de10c92c tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1155.729544] env[63345]: DEBUG nova.scheduler.client.report [None req-97cd02fb-90ec-4381-bb09-070baa7d52f7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1156.234659] env[63345]: DEBUG oslo_concurrency.lockutils [None req-97cd02fb-90ec-4381-bb09-070baa7d52f7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.192s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1156.235041] env[63345]: INFO nova.compute.manager [None req-97cd02fb-90ec-4381-bb09-070baa7d52f7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Migrating [ 1156.242708] env[63345]: DEBUG oslo_concurrency.lockutils [None req-67e1ace4-91a3-4f5f-ab5b-9793de10c92c tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.534s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1156.242955] env[63345]: DEBUG nova.objects.instance [None req-67e1ace4-91a3-4f5f-ab5b-9793de10c92c tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Lazy-loading 'resources' on Instance uuid 95738bee-d291-4f27-aeff-9445939bb3fa {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1156.353163] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6166f092-1bde-426f-a372-11378925584b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.360865] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-297c1daa-1b73-4a28-b9bd-2fcd915fcae2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.390445] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a39ec12-cd81-4e27-957a-52b606dbcc84 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.398060] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-823b854e-0c8e-4187-b0a7-10720c822eb3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.411300] env[63345]: DEBUG nova.compute.provider_tree [None req-67e1ace4-91a3-4f5f-ab5b-9793de10c92c tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1156.751269] env[63345]: DEBUG oslo_concurrency.lockutils [None req-97cd02fb-90ec-4381-bb09-070baa7d52f7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Acquiring lock "refresh_cache-5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1156.751533] env[63345]: DEBUG oslo_concurrency.lockutils [None req-97cd02fb-90ec-4381-bb09-070baa7d52f7 
tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Acquired lock "refresh_cache-5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1156.751769] env[63345]: DEBUG nova.network.neutron [None req-97cd02fb-90ec-4381-bb09-070baa7d52f7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1156.914572] env[63345]: DEBUG nova.scheduler.client.report [None req-67e1ace4-91a3-4f5f-ab5b-9793de10c92c tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1157.419412] env[63345]: DEBUG oslo_concurrency.lockutils [None req-67e1ace4-91a3-4f5f-ab5b-9793de10c92c tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.177s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1157.436058] env[63345]: INFO nova.scheduler.client.report [None req-67e1ace4-91a3-4f5f-ab5b-9793de10c92c tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Deleted allocations for instance 95738bee-d291-4f27-aeff-9445939bb3fa [ 1157.456533] env[63345]: DEBUG nova.network.neutron [None req-97cd02fb-90ec-4381-bb09-070baa7d52f7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Updating instance_info_cache with network_info: [{"id": "104a12d2-9632-4d24-a0e3-d4b18e907a58", "address": "fa:16:3e:a9:14:31", "network": {"id": "dffa0b34-9323-42eb-aeb1-e32aebcb75c8", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1826417035-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.227", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "57e386920081487583ea143003aca8c4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "94e1d797-8eb2-4400-9f7d-f2eb60eb4cf2", "external-id": "nsx-vlan-transportzone-828", "segmentation_id": 828, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap104a12d2-96", "ovs_interfaceid": "104a12d2-9632-4d24-a0e3-d4b18e907a58", "qbh_params": null, 
"qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1157.943642] env[63345]: DEBUG oslo_concurrency.lockutils [None req-67e1ace4-91a3-4f5f-ab5b-9793de10c92c tempest-AttachVolumeShelveTestJSON-2082682761 tempest-AttachVolumeShelveTestJSON-2082682761-project-member] Lock "95738bee-d291-4f27-aeff-9445939bb3fa" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.804s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1157.959915] env[63345]: DEBUG oslo_concurrency.lockutils [None req-97cd02fb-90ec-4381-bb09-070baa7d52f7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Releasing lock "refresh_cache-5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1159.474576] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-094cb5f9-a692-41ec-86bb-faff83b11f93 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.497031] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-97cd02fb-90ec-4381-bb09-070baa7d52f7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Updating instance '5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46' progress to 0 {{(pid=63345) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1160.002428] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-97cd02fb-90ec-4381-bb09-070baa7d52f7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1160.003465] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ead9f35b-7e9a-450c-8b3a-f2afb0d443e3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.010559] env[63345]: DEBUG oslo_vmware.api [None req-97cd02fb-90ec-4381-bb09-070baa7d52f7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 1160.010559] env[63345]: value = "task-1017960" [ 1160.010559] env[63345]: _type = "Task" [ 1160.010559] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1160.019146] env[63345]: DEBUG oslo_vmware.api [None req-97cd02fb-90ec-4381-bb09-070baa7d52f7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017960, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.520486] env[63345]: DEBUG oslo_vmware.api [None req-97cd02fb-90ec-4381-bb09-070baa7d52f7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017960, 'name': PowerOffVM_Task, 'duration_secs': 0.205858} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1160.520843] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-97cd02fb-90ec-4381-bb09-070baa7d52f7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1160.520930] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-97cd02fb-90ec-4381-bb09-070baa7d52f7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Updating instance '5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46' progress to 17 {{(pid=63345) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1161.027698] env[63345]: DEBUG nova.virt.hardware [None req-97cd02fb-90ec-4381-bb09-070baa7d52f7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:39Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1161.027949] env[63345]: DEBUG nova.virt.hardware [None req-97cd02fb-90ec-4381-bb09-070baa7d52f7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1161.028138] env[63345]: DEBUG nova.virt.hardware [None req-97cd02fb-90ec-4381-bb09-070baa7d52f7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1161.028330] env[63345]: DEBUG nova.virt.hardware [None req-97cd02fb-90ec-4381-bb09-070baa7d52f7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1161.028486] env[63345]: DEBUG nova.virt.hardware [None req-97cd02fb-90ec-4381-bb09-070baa7d52f7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1161.028642] env[63345]: DEBUG nova.virt.hardware [None req-97cd02fb-90ec-4381-bb09-070baa7d52f7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1161.028851] env[63345]: DEBUG nova.virt.hardware [None req-97cd02fb-90ec-4381-bb09-070baa7d52f7 tempest-ServerActionsTestOtherB-1518567629 
tempest-ServerActionsTestOtherB-1518567629-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1161.029025] env[63345]: DEBUG nova.virt.hardware [None req-97cd02fb-90ec-4381-bb09-070baa7d52f7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1161.029215] env[63345]: DEBUG nova.virt.hardware [None req-97cd02fb-90ec-4381-bb09-070baa7d52f7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1161.029384] env[63345]: DEBUG nova.virt.hardware [None req-97cd02fb-90ec-4381-bb09-070baa7d52f7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1161.029568] env[63345]: DEBUG nova.virt.hardware [None req-97cd02fb-90ec-4381-bb09-070baa7d52f7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1161.034659] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-28603cf6-5ab0-482c-9730-6e90a11b71cc {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.050291] env[63345]: DEBUG oslo_vmware.api [None req-97cd02fb-90ec-4381-bb09-070baa7d52f7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 1161.050291] env[63345]: value = "task-1017961" [ 1161.050291] env[63345]: _type = "Task" [ 1161.050291] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1161.058418] env[63345]: DEBUG oslo_vmware.api [None req-97cd02fb-90ec-4381-bb09-070baa7d52f7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017961, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.175315] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7fb5c6ec-c619-4b57-9a2a-f412ae1cd0a1 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Acquiring lock "735c5f4f-98c1-4c75-bb82-66e49b0233f6" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1161.175569] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7fb5c6ec-c619-4b57-9a2a-f412ae1cd0a1 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Lock "735c5f4f-98c1-4c75-bb82-66e49b0233f6" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1161.560577] env[63345]: DEBUG oslo_vmware.api [None req-97cd02fb-90ec-4381-bb09-070baa7d52f7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017961, 'name': ReconfigVM_Task, 'duration_secs': 0.196704} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1161.560956] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-97cd02fb-90ec-4381-bb09-070baa7d52f7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Updating instance '5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46' progress to 33 {{(pid=63345) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1161.679217] env[63345]: DEBUG nova.compute.utils [None req-7fb5c6ec-c619-4b57-9a2a-f412ae1cd0a1 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1162.067739] env[63345]: DEBUG nova.virt.hardware [None req-97cd02fb-90ec-4381-bb09-070baa7d52f7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1162.067990] env[63345]: DEBUG nova.virt.hardware [None req-97cd02fb-90ec-4381-bb09-070baa7d52f7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1162.068176] env[63345]: DEBUG nova.virt.hardware [None req-97cd02fb-90ec-4381-bb09-070baa7d52f7 tempest-ServerActionsTestOtherB-1518567629 
tempest-ServerActionsTestOtherB-1518567629-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1162.068418] env[63345]: DEBUG nova.virt.hardware [None req-97cd02fb-90ec-4381-bb09-070baa7d52f7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1162.068651] env[63345]: DEBUG nova.virt.hardware [None req-97cd02fb-90ec-4381-bb09-070baa7d52f7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1162.068815] env[63345]: DEBUG nova.virt.hardware [None req-97cd02fb-90ec-4381-bb09-070baa7d52f7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1162.069039] env[63345]: DEBUG nova.virt.hardware [None req-97cd02fb-90ec-4381-bb09-070baa7d52f7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1162.069212] env[63345]: DEBUG nova.virt.hardware [None req-97cd02fb-90ec-4381-bb09-070baa7d52f7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1162.069383] env[63345]: DEBUG nova.virt.hardware [None req-97cd02fb-90ec-4381-bb09-070baa7d52f7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1162.069552] env[63345]: DEBUG nova.virt.hardware [None req-97cd02fb-90ec-4381-bb09-070baa7d52f7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1162.069727] env[63345]: DEBUG nova.virt.hardware [None req-97cd02fb-90ec-4381-bb09-070baa7d52f7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1162.075259] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-97cd02fb-90ec-4381-bb09-070baa7d52f7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Reconfiguring VM instance instance-00000071 to detach disk 2000 {{(pid=63345) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1162.075566] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e568620e-af65-4806-82a0-147d2ccc3309 {{(pid=63345) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.094360] env[63345]: DEBUG oslo_vmware.api [None req-97cd02fb-90ec-4381-bb09-070baa7d52f7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 1162.094360] env[63345]: value = "task-1017962" [ 1162.094360] env[63345]: _type = "Task" [ 1162.094360] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1162.102177] env[63345]: DEBUG oslo_vmware.api [None req-97cd02fb-90ec-4381-bb09-070baa7d52f7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017962, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.182183] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7fb5c6ec-c619-4b57-9a2a-f412ae1cd0a1 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Lock "735c5f4f-98c1-4c75-bb82-66e49b0233f6" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1162.605159] env[63345]: DEBUG oslo_vmware.api [None req-97cd02fb-90ec-4381-bb09-070baa7d52f7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017962, 'name': ReconfigVM_Task, 'duration_secs': 0.216984} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1162.605761] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-97cd02fb-90ec-4381-bb09-070baa7d52f7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Reconfigured VM instance instance-00000071 to detach disk 2000 {{(pid=63345) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1162.606591] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41fa9172-2e8b-4245-9a0f-9d91cefad6a9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.635654] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-97cd02fb-90ec-4381-bb09-070baa7d52f7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Reconfiguring VM instance instance-00000071 to attach disk [datastore2] 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46/5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46.vmdk or device None with type thin {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1162.635993] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c11c6075-df8d-409c-9ebf-d49a38283c6b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.654760] env[63345]: DEBUG oslo_vmware.api [None req-97cd02fb-90ec-4381-bb09-070baa7d52f7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 1162.654760] env[63345]: value = "task-1017963" [ 
1162.654760] env[63345]: _type = "Task" [ 1162.654760] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1162.663036] env[63345]: DEBUG oslo_vmware.api [None req-97cd02fb-90ec-4381-bb09-070baa7d52f7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017963, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.165320] env[63345]: DEBUG oslo_vmware.api [None req-97cd02fb-90ec-4381-bb09-070baa7d52f7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017963, 'name': ReconfigVM_Task, 'duration_secs': 0.282183} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.165616] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-97cd02fb-90ec-4381-bb09-070baa7d52f7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Reconfigured VM instance instance-00000071 to attach disk [datastore2] 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46/5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46.vmdk or device None with type thin {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1163.165897] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-97cd02fb-90ec-4381-bb09-070baa7d52f7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Updating instance '5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46' progress to 50 {{(pid=63345) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1163.252910] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7fb5c6ec-c619-4b57-9a2a-f412ae1cd0a1 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Acquiring lock "735c5f4f-98c1-4c75-bb82-66e49b0233f6" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1163.252910] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7fb5c6ec-c619-4b57-9a2a-f412ae1cd0a1 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Lock "735c5f4f-98c1-4c75-bb82-66e49b0233f6" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1163.253188] env[63345]: INFO nova.compute.manager [None req-7fb5c6ec-c619-4b57-9a2a-f412ae1cd0a1 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 735c5f4f-98c1-4c75-bb82-66e49b0233f6] Attaching volume 60a952b8-2a3a-4a4c-a2a7-e0a662a4249f to /dev/sdb [ 1163.288472] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3c375ae-35dc-41bd-83ca-acf741123842 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.295469] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-922f1390-359c-4d38-adcb-72a00fcd085c 
{{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.308386] env[63345]: DEBUG nova.virt.block_device [None req-7fb5c6ec-c619-4b57-9a2a-f412ae1cd0a1 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 735c5f4f-98c1-4c75-bb82-66e49b0233f6] Updating existing volume attachment record: 06b4d770-0db7-4f43-89fa-5ddb3143ba23 {{(pid=63345) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1163.672775] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8abf0879-e252-4313-9a83-3a87888e7884 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.694713] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f214f4c-8360-40d8-8d55-dc4e8122a432 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.715510] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-97cd02fb-90ec-4381-bb09-070baa7d52f7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Updating instance '5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46' progress to 67 {{(pid=63345) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1165.345602] env[63345]: DEBUG nova.network.neutron [None req-97cd02fb-90ec-4381-bb09-070baa7d52f7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Port 104a12d2-9632-4d24-a0e3-d4b18e907a58 binding to destination host cpu-1 is already ACTIVE {{(pid=63345) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3171}} [ 1166.367652] env[63345]: DEBUG oslo_concurrency.lockutils [None req-97cd02fb-90ec-4381-bb09-070baa7d52f7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Acquiring lock "5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1166.367895] env[63345]: DEBUG oslo_concurrency.lockutils [None req-97cd02fb-90ec-4381-bb09-070baa7d52f7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lock "5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1166.368172] env[63345]: DEBUG oslo_concurrency.lockutils [None req-97cd02fb-90ec-4381-bb09-070baa7d52f7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lock "5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1167.420087] env[63345]: DEBUG oslo_concurrency.lockutils [None req-97cd02fb-90ec-4381-bb09-070baa7d52f7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Acquiring lock 
"refresh_cache-5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1167.420376] env[63345]: DEBUG oslo_concurrency.lockutils [None req-97cd02fb-90ec-4381-bb09-070baa7d52f7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Acquired lock "refresh_cache-5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1167.420533] env[63345]: DEBUG nova.network.neutron [None req-97cd02fb-90ec-4381-bb09-070baa7d52f7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1167.853266] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-7fb5c6ec-c619-4b57-9a2a-f412ae1cd0a1 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 735c5f4f-98c1-4c75-bb82-66e49b0233f6] Volume attach. Driver type: vmdk {{(pid=63345) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1167.853520] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-7fb5c6ec-c619-4b57-9a2a-f412ae1cd0a1 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 735c5f4f-98c1-4c75-bb82-66e49b0233f6] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-226183', 'volume_id': '60a952b8-2a3a-4a4c-a2a7-e0a662a4249f', 'name': 'volume-60a952b8-2a3a-4a4c-a2a7-e0a662a4249f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '735c5f4f-98c1-4c75-bb82-66e49b0233f6', 'attached_at': '', 'detached_at': '', 'volume_id': '60a952b8-2a3a-4a4c-a2a7-e0a662a4249f', 'serial': '60a952b8-2a3a-4a4c-a2a7-e0a662a4249f'} {{(pid=63345) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1167.854483] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91e453bb-1abf-4889-ac9d-5a0dcf45fee0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.870570] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39bf1004-59ee-4c28-a900-27d18ce78da6 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.894573] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-7fb5c6ec-c619-4b57-9a2a-f412ae1cd0a1 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 735c5f4f-98c1-4c75-bb82-66e49b0233f6] Reconfiguring VM instance instance-00000072 to attach disk [datastore2] volume-60a952b8-2a3a-4a4c-a2a7-e0a662a4249f/volume-60a952b8-2a3a-4a4c-a2a7-e0a662a4249f.vmdk or device None with type thin {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1167.894825] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5d4f97e1-85b5-4d25-a7c0-26bf59f2b722 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.912436] env[63345]: DEBUG oslo_vmware.api 
[None req-7fb5c6ec-c619-4b57-9a2a-f412ae1cd0a1 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Waiting for the task: (returnval){ [ 1167.912436] env[63345]: value = "task-1017966" [ 1167.912436] env[63345]: _type = "Task" [ 1167.912436] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1167.920057] env[63345]: DEBUG oslo_vmware.api [None req-7fb5c6ec-c619-4b57-9a2a-f412ae1cd0a1 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017966, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.136983] env[63345]: DEBUG nova.network.neutron [None req-97cd02fb-90ec-4381-bb09-070baa7d52f7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Updating instance_info_cache with network_info: [{"id": "104a12d2-9632-4d24-a0e3-d4b18e907a58", "address": "fa:16:3e:a9:14:31", "network": {"id": "dffa0b34-9323-42eb-aeb1-e32aebcb75c8", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1826417035-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.227", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "57e386920081487583ea143003aca8c4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "94e1d797-8eb2-4400-9f7d-f2eb60eb4cf2", "external-id": "nsx-vlan-transportzone-828", "segmentation_id": 828, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap104a12d2-96", "ovs_interfaceid": "104a12d2-9632-4d24-a0e3-d4b18e907a58", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1168.421740] env[63345]: DEBUG oslo_vmware.api [None req-7fb5c6ec-c619-4b57-9a2a-f412ae1cd0a1 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017966, 'name': ReconfigVM_Task, 'duration_secs': 0.337038} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1168.422039] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-7fb5c6ec-c619-4b57-9a2a-f412ae1cd0a1 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 735c5f4f-98c1-4c75-bb82-66e49b0233f6] Reconfigured VM instance instance-00000072 to attach disk [datastore2] volume-60a952b8-2a3a-4a4c-a2a7-e0a662a4249f/volume-60a952b8-2a3a-4a4c-a2a7-e0a662a4249f.vmdk or device None with type thin {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1168.426986] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-929efd76-a003-4bf8-bce5-6923d8aaf620 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.441113] env[63345]: DEBUG oslo_vmware.api [None req-7fb5c6ec-c619-4b57-9a2a-f412ae1cd0a1 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Waiting for the task: (returnval){ [ 1168.441113] env[63345]: value = "task-1017967" [ 1168.441113] env[63345]: _type = "Task" [ 1168.441113] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1168.448685] env[63345]: DEBUG oslo_vmware.api [None req-7fb5c6ec-c619-4b57-9a2a-f412ae1cd0a1 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017967, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.639806] env[63345]: DEBUG oslo_concurrency.lockutils [None req-97cd02fb-90ec-4381-bb09-070baa7d52f7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Releasing lock "refresh_cache-5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1168.950966] env[63345]: DEBUG oslo_vmware.api [None req-7fb5c6ec-c619-4b57-9a2a-f412ae1cd0a1 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017967, 'name': ReconfigVM_Task, 'duration_secs': 0.133093} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1168.951303] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-7fb5c6ec-c619-4b57-9a2a-f412ae1cd0a1 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 735c5f4f-98c1-4c75-bb82-66e49b0233f6] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-226183', 'volume_id': '60a952b8-2a3a-4a4c-a2a7-e0a662a4249f', 'name': 'volume-60a952b8-2a3a-4a4c-a2a7-e0a662a4249f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '735c5f4f-98c1-4c75-bb82-66e49b0233f6', 'attached_at': '', 'detached_at': '', 'volume_id': '60a952b8-2a3a-4a4c-a2a7-e0a662a4249f', 'serial': '60a952b8-2a3a-4a4c-a2a7-e0a662a4249f'} {{(pid=63345) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1169.148966] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dce232c-7af0-4d8d-8a1b-0ff5334d820f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.156618] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46a1953b-9489-480a-9b63-55e70c9443a7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.986257] env[63345]: DEBUG nova.objects.instance [None req-7fb5c6ec-c619-4b57-9a2a-f412ae1cd0a1 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Lazy-loading 'flavor' on Instance uuid 735c5f4f-98c1-4c75-bb82-66e49b0233f6 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1170.249699] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2004d18c-eb96-4d8a-9da1-b72a6403882c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.271625] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e1a62dd-b7b4-4d05-a63e-490895744615 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.278512] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-97cd02fb-90ec-4381-bb09-070baa7d52f7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Updating instance '5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46' progress to 83 {{(pid=63345) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1170.491421] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7fb5c6ec-c619-4b57-9a2a-f412ae1cd0a1 tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Lock "735c5f4f-98c1-4c75-bb82-66e49b0233f6" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.238s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1170.669812] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1177d40c-7635-425e-aa4f-8ad10b501b1d tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Acquiring lock "735c5f4f-98c1-4c75-bb82-66e49b0233f6" by 
"nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1170.670138] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1177d40c-7635-425e-aa4f-8ad10b501b1d tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Lock "735c5f4f-98c1-4c75-bb82-66e49b0233f6" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1170.784432] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-97cd02fb-90ec-4381-bb09-070baa7d52f7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1170.784773] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-511dd32c-6e61-4f09-8dcb-547c7e1349ef {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.792234] env[63345]: DEBUG oslo_vmware.api [None req-97cd02fb-90ec-4381-bb09-070baa7d52f7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 1170.792234] env[63345]: value = "task-1017968" [ 1170.792234] env[63345]: _type = "Task" [ 1170.792234] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1170.799730] env[63345]: DEBUG oslo_vmware.api [None req-97cd02fb-90ec-4381-bb09-070baa7d52f7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017968, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.172900] env[63345]: INFO nova.compute.manager [None req-1177d40c-7635-425e-aa4f-8ad10b501b1d tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 735c5f4f-98c1-4c75-bb82-66e49b0233f6] Detaching volume 60a952b8-2a3a-4a4c-a2a7-e0a662a4249f [ 1171.205031] env[63345]: INFO nova.virt.block_device [None req-1177d40c-7635-425e-aa4f-8ad10b501b1d tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 735c5f4f-98c1-4c75-bb82-66e49b0233f6] Attempting to driver detach volume 60a952b8-2a3a-4a4c-a2a7-e0a662a4249f from mountpoint /dev/sdb [ 1171.205290] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-1177d40c-7635-425e-aa4f-8ad10b501b1d tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 735c5f4f-98c1-4c75-bb82-66e49b0233f6] Volume detach. 
Driver type: vmdk {{(pid=63345) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1171.205502] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-1177d40c-7635-425e-aa4f-8ad10b501b1d tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 735c5f4f-98c1-4c75-bb82-66e49b0233f6] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-226183', 'volume_id': '60a952b8-2a3a-4a4c-a2a7-e0a662a4249f', 'name': 'volume-60a952b8-2a3a-4a4c-a2a7-e0a662a4249f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '735c5f4f-98c1-4c75-bb82-66e49b0233f6', 'attached_at': '', 'detached_at': '', 'volume_id': '60a952b8-2a3a-4a4c-a2a7-e0a662a4249f', 'serial': '60a952b8-2a3a-4a4c-a2a7-e0a662a4249f'} {{(pid=63345) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1171.206383] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63b9840d-5226-456c-b5dd-595fb08a6907 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.228248] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a34bcab-9f50-40dc-a466-94cce1464d99 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.234897] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60fb86b9-fa90-4bc1-bde9-4d0ffe748a3e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.254259] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d167323d-ac9f-446e-935b-bdb2abcb6d9a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.267989] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-1177d40c-7635-425e-aa4f-8ad10b501b1d tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] The volume has not been displaced from its original location: [datastore2] volume-60a952b8-2a3a-4a4c-a2a7-e0a662a4249f/volume-60a952b8-2a3a-4a4c-a2a7-e0a662a4249f.vmdk. No consolidation needed. 
{{(pid=63345) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1171.273079] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-1177d40c-7635-425e-aa4f-8ad10b501b1d tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 735c5f4f-98c1-4c75-bb82-66e49b0233f6] Reconfiguring VM instance instance-00000072 to detach disk 2001 {{(pid=63345) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1171.273342] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b906f6ac-a954-4306-af51-3bf37f9b57b2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.290513] env[63345]: DEBUG oslo_vmware.api [None req-1177d40c-7635-425e-aa4f-8ad10b501b1d tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Waiting for the task: (returnval){ [ 1171.290513] env[63345]: value = "task-1017969" [ 1171.290513] env[63345]: _type = "Task" [ 1171.290513] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1171.300373] env[63345]: DEBUG oslo_vmware.api [None req-1177d40c-7635-425e-aa4f-8ad10b501b1d tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017969, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.303064] env[63345]: DEBUG oslo_vmware.api [None req-97cd02fb-90ec-4381-bb09-070baa7d52f7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017968, 'name': PowerOnVM_Task, 'duration_secs': 0.362705} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1171.303312] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-97cd02fb-90ec-4381-bb09-070baa7d52f7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1171.303502] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-97cd02fb-90ec-4381-bb09-070baa7d52f7 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Updating instance '5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46' progress to 100 {{(pid=63345) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1354}} [ 1171.800136] env[63345]: DEBUG oslo_vmware.api [None req-1177d40c-7635-425e-aa4f-8ad10b501b1d tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017969, 'name': ReconfigVM_Task, 'duration_secs': 0.209794} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1171.800429] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-1177d40c-7635-425e-aa4f-8ad10b501b1d tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 735c5f4f-98c1-4c75-bb82-66e49b0233f6] Reconfigured VM instance instance-00000072 to detach disk 2001 {{(pid=63345) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1171.804925] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a849e533-74cd-4612-b103-e368ccd40471 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.822460] env[63345]: DEBUG oslo_vmware.api [None req-1177d40c-7635-425e-aa4f-8ad10b501b1d tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Waiting for the task: (returnval){ [ 1171.822460] env[63345]: value = "task-1017970" [ 1171.822460] env[63345]: _type = "Task" [ 1171.822460] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1171.829481] env[63345]: DEBUG oslo_vmware.api [None req-1177d40c-7635-425e-aa4f-8ad10b501b1d tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017970, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.332520] env[63345]: DEBUG oslo_vmware.api [None req-1177d40c-7635-425e-aa4f-8ad10b501b1d tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017970, 'name': ReconfigVM_Task, 'duration_secs': 0.134896} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1172.332877] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-1177d40c-7635-425e-aa4f-8ad10b501b1d tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 735c5f4f-98c1-4c75-bb82-66e49b0233f6] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-226183', 'volume_id': '60a952b8-2a3a-4a4c-a2a7-e0a662a4249f', 'name': 'volume-60a952b8-2a3a-4a4c-a2a7-e0a662a4249f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '735c5f4f-98c1-4c75-bb82-66e49b0233f6', 'attached_at': '', 'detached_at': '', 'volume_id': '60a952b8-2a3a-4a4c-a2a7-e0a662a4249f', 'serial': '60a952b8-2a3a-4a4c-a2a7-e0a662a4249f'} {{(pid=63345) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1172.874088] env[63345]: DEBUG nova.objects.instance [None req-1177d40c-7635-425e-aa4f-8ad10b501b1d tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Lazy-loading 'flavor' on Instance uuid 735c5f4f-98c1-4c75-bb82-66e49b0233f6 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1173.184200] env[63345]: DEBUG nova.network.neutron [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Port 104a12d2-9632-4d24-a0e3-d4b18e907a58 binding to destination host cpu-1 is already ACTIVE {{(pid=63345) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3171}} [ 1173.184496] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Acquiring lock "refresh_cache-5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1173.184684] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Acquired lock "refresh_cache-5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1173.184860] env[63345]: DEBUG nova.network.neutron [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1173.880095] env[63345]: DEBUG oslo_concurrency.lockutils [None req-1177d40c-7635-425e-aa4f-8ad10b501b1d tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Lock "735c5f4f-98c1-4c75-bb82-66e49b0233f6" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.210s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1173.901066] env[63345]: DEBUG nova.network.neutron [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 
5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Updating instance_info_cache with network_info: [{"id": "104a12d2-9632-4d24-a0e3-d4b18e907a58", "address": "fa:16:3e:a9:14:31", "network": {"id": "dffa0b34-9323-42eb-aeb1-e32aebcb75c8", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1826417035-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.227", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "57e386920081487583ea143003aca8c4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "94e1d797-8eb2-4400-9f7d-f2eb60eb4cf2", "external-id": "nsx-vlan-transportzone-828", "segmentation_id": 828, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap104a12d2-96", "ovs_interfaceid": "104a12d2-9632-4d24-a0e3-d4b18e907a58", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1174.404055] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Releasing lock "refresh_cache-5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1174.908591] env[63345]: DEBUG nova.compute.manager [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=63345) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:900}} [ 1174.910810] env[63345]: DEBUG oslo_concurrency.lockutils [None req-74c35146-1686-478d-afbd-6298fe90a36f tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Acquiring lock "735c5f4f-98c1-4c75-bb82-66e49b0233f6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1174.910947] env[63345]: DEBUG oslo_concurrency.lockutils [None req-74c35146-1686-478d-afbd-6298fe90a36f tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Lock "735c5f4f-98c1-4c75-bb82-66e49b0233f6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1174.911163] env[63345]: DEBUG oslo_concurrency.lockutils [None req-74c35146-1686-478d-afbd-6298fe90a36f tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Acquiring lock "735c5f4f-98c1-4c75-bb82-66e49b0233f6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1174.911618] env[63345]: DEBUG oslo_concurrency.lockutils [None req-74c35146-1686-478d-afbd-6298fe90a36f tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Lock "735c5f4f-98c1-4c75-bb82-66e49b0233f6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1174.911618] env[63345]: DEBUG oslo_concurrency.lockutils [None req-74c35146-1686-478d-afbd-6298fe90a36f tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Lock "735c5f4f-98c1-4c75-bb82-66e49b0233f6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1174.916300] env[63345]: INFO nova.compute.manager [None req-74c35146-1686-478d-afbd-6298fe90a36f tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 735c5f4f-98c1-4c75-bb82-66e49b0233f6] Terminating instance [ 1175.087730] env[63345]: DEBUG oslo_concurrency.lockutils [None req-49cda40c-58df-4b95-b620-3a78081ad711 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Acquiring lock "937ba0d6-bf23-45ae-8d75-cd7559e436f5" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1175.087910] env[63345]: DEBUG oslo_concurrency.lockutils [None req-49cda40c-58df-4b95-b620-3a78081ad711 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Lock "937ba0d6-bf23-45ae-8d75-cd7559e436f5" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1175.420170] env[63345]: DEBUG nova.compute.manager [None req-74c35146-1686-478d-afbd-6298fe90a36f tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 735c5f4f-98c1-4c75-bb82-66e49b0233f6] Start destroying the instance on the hypervisor. 
{{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 1175.420391] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-74c35146-1686-478d-afbd-6298fe90a36f tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 735c5f4f-98c1-4c75-bb82-66e49b0233f6] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1175.421321] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34cb5431-733b-4f21-957a-7bd908c536f8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.430267] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-74c35146-1686-478d-afbd-6298fe90a36f tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 735c5f4f-98c1-4c75-bb82-66e49b0233f6] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1175.430496] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-045c7e5e-e553-458e-b8c5-a9067fa43daa {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.436515] env[63345]: DEBUG oslo_vmware.api [None req-74c35146-1686-478d-afbd-6298fe90a36f tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Waiting for the task: (returnval){ [ 1175.436515] env[63345]: value = "task-1017971" [ 1175.436515] env[63345]: _type = "Task" [ 1175.436515] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1175.443653] env[63345]: DEBUG oslo_vmware.api [None req-74c35146-1686-478d-afbd-6298fe90a36f tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017971, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.590669] env[63345]: DEBUG nova.compute.utils [None req-49cda40c-58df-4b95-b620-3a78081ad711 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1175.946333] env[63345]: DEBUG oslo_vmware.api [None req-74c35146-1686-478d-afbd-6298fe90a36f tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017971, 'name': PowerOffVM_Task, 'duration_secs': 0.188208} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1175.946616] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-74c35146-1686-478d-afbd-6298fe90a36f tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 735c5f4f-98c1-4c75-bb82-66e49b0233f6] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1175.946860] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-74c35146-1686-478d-afbd-6298fe90a36f tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 735c5f4f-98c1-4c75-bb82-66e49b0233f6] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1175.947175] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-986d16f7-8970-4d3a-8eb9-1b7023b36b12 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.009838] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1176.010104] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1176.018032] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-74c35146-1686-478d-afbd-6298fe90a36f tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 735c5f4f-98c1-4c75-bb82-66e49b0233f6] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1176.018351] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-74c35146-1686-478d-afbd-6298fe90a36f tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 735c5f4f-98c1-4c75-bb82-66e49b0233f6] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1176.018607] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-74c35146-1686-478d-afbd-6298fe90a36f tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Deleting the datastore file [datastore2] 735c5f4f-98c1-4c75-bb82-66e49b0233f6 {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1176.018920] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ada93301-313d-4d2c-ac1d-47b549c47416 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.027119] env[63345]: DEBUG oslo_vmware.api [None req-74c35146-1686-478d-afbd-6298fe90a36f tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Waiting 
for the task: (returnval){ [ 1176.027119] env[63345]: value = "task-1017973" [ 1176.027119] env[63345]: _type = "Task" [ 1176.027119] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1176.035029] env[63345]: DEBUG oslo_vmware.api [None req-74c35146-1686-478d-afbd-6298fe90a36f tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017973, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1176.093852] env[63345]: DEBUG oslo_concurrency.lockutils [None req-49cda40c-58df-4b95-b620-3a78081ad711 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Lock "937ba0d6-bf23-45ae-8d75-cd7559e436f5" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1176.513758] env[63345]: DEBUG nova.objects.instance [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lazy-loading 'migration_context' on Instance uuid 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1176.536729] env[63345]: DEBUG oslo_vmware.api [None req-74c35146-1686-478d-afbd-6298fe90a36f tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Task: {'id': task-1017973, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.127052} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1176.536986] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-74c35146-1686-478d-afbd-6298fe90a36f tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1176.537193] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-74c35146-1686-478d-afbd-6298fe90a36f tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 735c5f4f-98c1-4c75-bb82-66e49b0233f6] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1176.537374] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-74c35146-1686-478d-afbd-6298fe90a36f tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 735c5f4f-98c1-4c75-bb82-66e49b0233f6] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1176.537553] env[63345]: INFO nova.compute.manager [None req-74c35146-1686-478d-afbd-6298fe90a36f tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] [instance: 735c5f4f-98c1-4c75-bb82-66e49b0233f6] Took 1.12 seconds to destroy the instance on the hypervisor. 
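The entries above show the oslo.vmware task-polling pattern: the driver invokes an asynchronous vCenter operation (here DeleteDatastoreFile_Task), then wait_for_task/_poll_task repeatedly re-reads the task state and logs its progress until it reports success or error. The following is a minimal, hypothetical Python sketch of that loop, not oslo.vmware's actual implementation; TaskInfo and the get_task_info callable are stand-ins introduced only for illustration.

    # Simplified sketch of the wait_for_task/_poll_task pattern seen in the log above.
    # TaskInfo and get_task_info() are illustrative stand-ins, not oslo.vmware APIs.
    import time
    from dataclasses import dataclass

    @dataclass
    class TaskInfo:
        state: str          # 'queued' | 'running' | 'success' | 'error'
        progress: int = 0   # percent complete, as logged ("progress is 0%")
        error: str = ""

    def wait_for_task(get_task_info, poll_interval=0.5):
        """Poll a task until it finishes, mirroring the DEBUG lines above."""
        while True:
            info = get_task_info()
            if info.state in ("queued", "running"):
                print(f"progress is {info.progress}%")   # periodic progress log
                time.sleep(poll_interval)
                continue
            if info.state == "success":
                return info                              # "completed successfully"
            raise RuntimeError(f"task failed: {info.error}")

In the real driver the polling is driven by an oslo.service looping call on the API session rather than a bare sleep loop, but the control flow is the same: poll, log progress, return the task result on success, raise on error.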
[ 1176.537797] env[63345]: DEBUG oslo.service.loopingcall [None req-74c35146-1686-478d-afbd-6298fe90a36f tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1176.537989] env[63345]: DEBUG nova.compute.manager [-] [instance: 735c5f4f-98c1-4c75-bb82-66e49b0233f6] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 1176.538091] env[63345]: DEBUG nova.network.neutron [-] [instance: 735c5f4f-98c1-4c75-bb82-66e49b0233f6] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1177.007030] env[63345]: DEBUG nova.compute.manager [req-6493119d-1ca0-4d23-9ab1-cce50264db93 req-91a5ae86-3935-424d-be61-ad847615440a service nova] [instance: 735c5f4f-98c1-4c75-bb82-66e49b0233f6] Received event network-vif-deleted-5e13d081-150b-4a13-a4c9-54ea78065ffe {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1177.007030] env[63345]: INFO nova.compute.manager [req-6493119d-1ca0-4d23-9ab1-cce50264db93 req-91a5ae86-3935-424d-be61-ad847615440a service nova] [instance: 735c5f4f-98c1-4c75-bb82-66e49b0233f6] Neutron deleted interface 5e13d081-150b-4a13-a4c9-54ea78065ffe; detaching it from the instance and deleting it from the info cache [ 1177.007030] env[63345]: DEBUG nova.network.neutron [req-6493119d-1ca0-4d23-9ab1-cce50264db93 req-91a5ae86-3935-424d-be61-ad847615440a service nova] [instance: 735c5f4f-98c1-4c75-bb82-66e49b0233f6] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1177.124417] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfeb953f-4d9f-486d-9b8b-cd737b56b670 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.132803] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4420d23-701a-4e60-b207-1188978ae31a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.164075] env[63345]: DEBUG oslo_concurrency.lockutils [None req-49cda40c-58df-4b95-b620-3a78081ad711 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Acquiring lock "937ba0d6-bf23-45ae-8d75-cd7559e436f5" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1177.164346] env[63345]: DEBUG oslo_concurrency.lockutils [None req-49cda40c-58df-4b95-b620-3a78081ad711 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Lock "937ba0d6-bf23-45ae-8d75-cd7559e436f5" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1177.164632] env[63345]: INFO nova.compute.manager [None req-49cda40c-58df-4b95-b620-3a78081ad711 tempest-ServerRescueNegativeTestJSON-736059177 
tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Attaching volume 6c50c9be-1b18-4a50-af11-ccaeacb957ca to /dev/sdb [ 1177.167244] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-015805c7-ee55-47b0-860b-1485b1d71f34 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.183085] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b369020-4bae-404b-923d-0db9f83ed3e5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.197785] env[63345]: DEBUG nova.compute.provider_tree [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1177.214532] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8b97319-fe87-4b20-a63f-6e52cb625b1d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.221862] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46ac7f41-bcfa-4b4f-81bc-b22bb15eed66 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.235475] env[63345]: DEBUG nova.virt.block_device [None req-49cda40c-58df-4b95-b620-3a78081ad711 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Updating existing volume attachment record: f7257616-a0cc-419d-b4b7-b618dd1cd0bf {{(pid=63345) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1177.480409] env[63345]: DEBUG nova.network.neutron [-] [instance: 735c5f4f-98c1-4c75-bb82-66e49b0233f6] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1177.509834] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-090d4ba9-a591-4411-bc2a-22217609b97d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.520742] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef13614a-9240-4262-a2d9-13dec3f44356 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.551421] env[63345]: DEBUG nova.compute.manager [req-6493119d-1ca0-4d23-9ab1-cce50264db93 req-91a5ae86-3935-424d-be61-ad847615440a service nova] [instance: 735c5f4f-98c1-4c75-bb82-66e49b0233f6] Detach interface failed, port_id=5e13d081-150b-4a13-a4c9-54ea78065ffe, reason: Instance 735c5f4f-98c1-4c75-bb82-66e49b0233f6 could not 
be found. {{(pid=63345) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 1177.718727] env[63345]: ERROR nova.scheduler.client.report [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [req-c5187564-071c-47b1-a1d6-79203abd4344] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID fc35ddde-c15e-4ab8-bf77-a06ae0805b57. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-c5187564-071c-47b1-a1d6-79203abd4344"}]} [ 1177.736575] env[63345]: DEBUG nova.scheduler.client.report [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Refreshing inventories for resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1177.751085] env[63345]: DEBUG nova.scheduler.client.report [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Updating ProviderTree inventory for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1177.751365] env[63345]: DEBUG nova.compute.provider_tree [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1177.763734] env[63345]: DEBUG nova.scheduler.client.report [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Refreshing aggregate associations for resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57, aggregates: None {{(pid=63345) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1177.785480] env[63345]: DEBUG nova.scheduler.client.report [None req-d450002f-55b8-47be-b52d-dbd55b91e479 
tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Refreshing trait associations for resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=63345) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1177.885313] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-968dc738-ec0e-459d-b916-31f848a0a6ca {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.894662] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f6e7bc5-784b-4534-9a06-aebcbcd73202 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.924316] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65422523-95ce-4fd1-92c7-3c0f22158413 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.932236] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eace2e2-344c-49fb-8fe7-da8a3cba9d53 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.945606] env[63345]: DEBUG nova.compute.provider_tree [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1177.983425] env[63345]: INFO nova.compute.manager [-] [instance: 735c5f4f-98c1-4c75-bb82-66e49b0233f6] Took 1.45 seconds to deallocate network for instance. 
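The ERROR/DEBUG sequence above is Placement's optimistic concurrency control at work: the inventory PUT is rejected with 409 placement.concurrent_update because the cached resource provider generation (171) is stale, so the report client refreshes inventories, aggregates, and traits and retries, after which the generation advances to 172. Below is a minimal sketch of that generation-conflict retry against the Placement HTTP API, assuming a reachable endpoint and token; PLACEMENT_URL, HEADERS, and the retry count are illustrative values, and this is not Nova's report client code.

    # Sketch of the generation-conflict retry visible in the log above.
    # PLACEMENT_URL and the token/header values are assumptions for illustration.
    import requests

    PLACEMENT_URL = "http://placement.example/resource_providers"      # assumption
    HEADERS = {"X-Auth-Token": "TOKEN",                                 # assumption
               "OpenStack-API-Version": "placement 1.26"}

    def set_inventory(rp_uuid, inventories, max_attempts=3):
        for _ in range(max_attempts):
            # Re-read the provider's current generation before each attempt.
            cur = requests.get(f"{PLACEMENT_URL}/{rp_uuid}/inventories",
                               headers=HEADERS).json()
            body = {"resource_provider_generation": cur["resource_provider_generation"],
                    "inventories": inventories}
            resp = requests.put(f"{PLACEMENT_URL}/{rp_uuid}/inventories",
                                json=body, headers=HEADERS)
            if resp.status_code != 409:
                resp.raise_for_status()
                return resp.json()      # response carries the bumped generation
            # 409 placement.concurrent_update: another writer bumped the generation; retry.
        raise RuntimeError("could not update inventory: repeated generation conflicts")

The key design point, matching the log, is that the client never increments the generation itself: it always re-reads the provider and resubmits with the server's current value, so concurrent writers serialize on Placement's side.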
[ 1178.476409] env[63345]: DEBUG nova.scheduler.client.report [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Updated inventory for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with generation 171 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1178.476722] env[63345]: DEBUG nova.compute.provider_tree [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Updating resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 generation from 171 to 172 during operation: update_inventory {{(pid=63345) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1178.476890] env[63345]: DEBUG nova.compute.provider_tree [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1178.490707] env[63345]: DEBUG oslo_concurrency.lockutils [None req-74c35146-1686-478d-afbd-6298fe90a36f tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1179.490247] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 3.477s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1179.493224] env[63345]: DEBUG oslo_concurrency.lockutils [None req-74c35146-1686-478d-afbd-6298fe90a36f tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.003s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1179.493464] env[63345]: DEBUG nova.objects.instance [None req-74c35146-1686-478d-afbd-6298fe90a36f tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Lazy-loading 'resources' on Instance uuid 735c5f4f-98c1-4c75-bb82-66e49b0233f6 {{(pid=63345) 
obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1180.092043] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb0fe14a-8250-4d89-8846-cc83e55af638 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.098768] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d76e103-11e9-4db9-81de-ec9ddb1df540 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.127490] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9340b089-2877-42d2-b41c-e3d79f21bc7a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.134538] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3704cc79-2c54-4e54-9e6b-ef9c67c5c2fc {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.147441] env[63345]: DEBUG nova.compute.provider_tree [None req-74c35146-1686-478d-afbd-6298fe90a36f tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1180.650157] env[63345]: DEBUG nova.scheduler.client.report [None req-74c35146-1686-478d-afbd-6298fe90a36f tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1180.678937] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5fe704c7-f4e6-44e8-b493-1f384004be49 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Acquiring lock "6b6ce545-0eca-4ef2-a859-c1e8ef978150" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1180.679246] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5fe704c7-f4e6-44e8-b493-1f384004be49 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Lock "6b6ce545-0eca-4ef2-a859-c1e8ef978150" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1180.799158] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1180.799398] env[63345]: DEBUG oslo_service.periodic_task 
[None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1180.799550] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Starting heal instance info cache {{(pid=63345) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10257}} [ 1181.028872] env[63345]: INFO nova.compute.manager [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Swapping old allocation on dict_keys(['fc35ddde-c15e-4ab8-bf77-a06ae0805b57']) held by migration 8378a98b-90aa-43e1-a3c1-112f8846a508 for instance [ 1181.051356] env[63345]: DEBUG nova.scheduler.client.report [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Overwriting current allocation {'allocations': {'fc35ddde-c15e-4ab8-bf77-a06ae0805b57': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 172}}, 'project_id': '57e386920081487583ea143003aca8c4', 'user_id': 'b3754c2317404a48a80cfee69f1044ee', 'consumer_generation': 1} on consumer 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46 {{(pid=63345) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2033}} [ 1181.136934] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Acquiring lock "refresh_cache-5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1181.137152] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Acquired lock "refresh_cache-5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1181.137337] env[63345]: DEBUG nova.network.neutron [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1181.155517] env[63345]: DEBUG oslo_concurrency.lockutils [None req-74c35146-1686-478d-afbd-6298fe90a36f tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.662s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1181.174116] env[63345]: INFO nova.scheduler.client.report [None req-74c35146-1686-478d-afbd-6298fe90a36f tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Deleted allocations for instance 735c5f4f-98c1-4c75-bb82-66e49b0233f6 [ 1181.183792] env[63345]: DEBUG nova.compute.utils [None req-5fe704c7-f4e6-44e8-b493-1f384004be49 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] 
Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1181.680861] env[63345]: DEBUG oslo_concurrency.lockutils [None req-74c35146-1686-478d-afbd-6298fe90a36f tempest-AttachVolumeNegativeTest-873190635 tempest-AttachVolumeNegativeTest-873190635-project-member] Lock "735c5f4f-98c1-4c75-bb82-66e49b0233f6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.770s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1181.686559] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5fe704c7-f4e6-44e8-b493-1f384004be49 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Lock "6b6ce545-0eca-4ef2-a859-c1e8ef978150" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1181.794464] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-49cda40c-58df-4b95-b620-3a78081ad711 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Volume attach. Driver type: vmdk {{(pid=63345) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1181.794748] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-49cda40c-58df-4b95-b620-3a78081ad711 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-226185', 'volume_id': '6c50c9be-1b18-4a50-af11-ccaeacb957ca', 'name': 'volume-6c50c9be-1b18-4a50-af11-ccaeacb957ca', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '937ba0d6-bf23-45ae-8d75-cd7559e436f5', 'attached_at': '', 'detached_at': '', 'volume_id': '6c50c9be-1b18-4a50-af11-ccaeacb957ca', 'serial': '6c50c9be-1b18-4a50-af11-ccaeacb957ca'} {{(pid=63345) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1181.795663] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37773abc-f405-4893-b6b6-822bde179327 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.815246] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-398158ae-e496-4e11-afb6-fffbeff5adbf {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.841682] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-49cda40c-58df-4b95-b620-3a78081ad711 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Reconfiguring VM instance instance-00000073 to attach disk [datastore2] volume-6c50c9be-1b18-4a50-af11-ccaeacb957ca/volume-6c50c9be-1b18-4a50-af11-ccaeacb957ca.vmdk or device None with type thin {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1181.842163] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-36e58516-c0e2-4592-846f-eec7e49d6deb {{(pid=63345) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.855454] env[63345]: DEBUG nova.network.neutron [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Updating instance_info_cache with network_info: [{"id": "104a12d2-9632-4d24-a0e3-d4b18e907a58", "address": "fa:16:3e:a9:14:31", "network": {"id": "dffa0b34-9323-42eb-aeb1-e32aebcb75c8", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1826417035-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.227", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "57e386920081487583ea143003aca8c4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "94e1d797-8eb2-4400-9f7d-f2eb60eb4cf2", "external-id": "nsx-vlan-transportzone-828", "segmentation_id": 828, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap104a12d2-96", "ovs_interfaceid": "104a12d2-9632-4d24-a0e3-d4b18e907a58", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1181.862220] env[63345]: DEBUG oslo_vmware.api [None req-49cda40c-58df-4b95-b620-3a78081ad711 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Waiting for the task: (returnval){ [ 1181.862220] env[63345]: value = "task-1017978" [ 1181.862220] env[63345]: _type = "Task" [ 1181.862220] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1181.870819] env[63345]: DEBUG oslo_vmware.api [None req-49cda40c-58df-4b95-b620-3a78081ad711 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017978, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.347883] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Acquiring lock "refresh_cache-148c961e-d260-4dbd-ad9f-52f94b072096" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1182.348283] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Acquired lock "refresh_cache-148c961e-d260-4dbd-ad9f-52f94b072096" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1182.348283] env[63345]: DEBUG nova.network.neutron [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] Forcefully refreshing network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2004}} [ 1182.358243] env[63345]: DEBUG oslo_concurrency.lockutils [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Releasing lock "refresh_cache-5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1182.359707] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38f088eb-0fc1-4c3f-9d26-10e2000774c5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.368489] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1ee4910-b088-4569-8d1e-53e21e765d28 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.373629] env[63345]: DEBUG oslo_vmware.api [None req-49cda40c-58df-4b95-b620-3a78081ad711 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017978, 'name': ReconfigVM_Task, 'duration_secs': 0.327574} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1182.374199] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-49cda40c-58df-4b95-b620-3a78081ad711 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Reconfigured VM instance instance-00000073 to attach disk [datastore2] volume-6c50c9be-1b18-4a50-af11-ccaeacb957ca/volume-6c50c9be-1b18-4a50-af11-ccaeacb957ca.vmdk or device None with type thin {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1182.379056] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8f111d96-8760-44a7-a52b-351a1812f6b2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.398532] env[63345]: DEBUG oslo_vmware.api [None req-49cda40c-58df-4b95-b620-3a78081ad711 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Waiting for the task: (returnval){ [ 1182.398532] env[63345]: value = "task-1017979" [ 1182.398532] env[63345]: _type = "Task" [ 1182.398532] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1182.406138] env[63345]: DEBUG oslo_vmware.api [None req-49cda40c-58df-4b95-b620-3a78081ad711 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017979, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.745231] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5fe704c7-f4e6-44e8-b493-1f384004be49 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Acquiring lock "6b6ce545-0eca-4ef2-a859-c1e8ef978150" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1182.745577] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5fe704c7-f4e6-44e8-b493-1f384004be49 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Lock "6b6ce545-0eca-4ef2-a859-c1e8ef978150" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1182.745793] env[63345]: INFO nova.compute.manager [None req-5fe704c7-f4e6-44e8-b493-1f384004be49 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Attaching volume 8a1a6d39-eb94-46e2-a4c3-0290f64a9afe to /dev/sdb [ 1182.775885] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85bb0ed7-7a99-46ac-9be2-ae53d414e374 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.782679] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23f13f76-b93c-4bb3-9a40-2aeb8696ab09 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.795477] env[63345]: DEBUG nova.virt.block_device [None req-5fe704c7-f4e6-44e8-b493-1f384004be49 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Updating existing volume attachment record: c528932a-f6b5-49c9-ba9b-263447c77d0e {{(pid=63345) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1182.907405] env[63345]: DEBUG oslo_vmware.api [None req-49cda40c-58df-4b95-b620-3a78081ad711 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017979, 'name': ReconfigVM_Task, 'duration_secs': 0.148824} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1182.907993] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-49cda40c-58df-4b95-b620-3a78081ad711 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-226185', 'volume_id': '6c50c9be-1b18-4a50-af11-ccaeacb957ca', 'name': 'volume-6c50c9be-1b18-4a50-af11-ccaeacb957ca', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '937ba0d6-bf23-45ae-8d75-cd7559e436f5', 'attached_at': '', 'detached_at': '', 'volume_id': '6c50c9be-1b18-4a50-af11-ccaeacb957ca', 'serial': '6c50c9be-1b18-4a50-af11-ccaeacb957ca'} {{(pid=63345) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1183.472232] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1183.472514] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-98b37399-1d87-4ae6-b2cc-f383ea8077eb {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.479764] env[63345]: DEBUG oslo_vmware.api [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 1183.479764] env[63345]: value = "task-1017982" [ 1183.479764] env[63345]: _type = "Task" [ 1183.479764] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1183.488595] env[63345]: DEBUG oslo_vmware.api [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017982, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.560487] env[63345]: DEBUG nova.network.neutron [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] Updating instance_info_cache with network_info: [{"id": "f2837ec1-0df3-454a-bc68-fb0ca9562eb4", "address": "fa:16:3e:85:d4:0e", "network": {"id": "04c13a40-3e24-45e3-b045-adb1f5b0ad03", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1754460710-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a7aaf150ea243b6a38a4b14f265bd4d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3753f451-fa23-4988-9361-074fb0bd3fd4", "external-id": "nsx-vlan-transportzone-440", "segmentation_id": 440, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf2837ec1-0d", "ovs_interfaceid": "f2837ec1-0df3-454a-bc68-fb0ca9562eb4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1183.951584] env[63345]: DEBUG nova.objects.instance [None req-49cda40c-58df-4b95-b620-3a78081ad711 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Lazy-loading 'flavor' on Instance uuid 937ba0d6-bf23-45ae-8d75-cd7559e436f5 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1183.989124] env[63345]: DEBUG oslo_vmware.api [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017982, 'name': PowerOffVM_Task, 'duration_secs': 0.23696} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1183.989400] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1183.990063] env[63345]: DEBUG nova.virt.hardware [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1183.990276] env[63345]: DEBUG nova.virt.hardware [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1183.990429] env[63345]: DEBUG nova.virt.hardware [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1183.990619] env[63345]: DEBUG nova.virt.hardware [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1183.990770] env[63345]: DEBUG nova.virt.hardware [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1183.990924] env[63345]: DEBUG nova.virt.hardware [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1183.991143] env[63345]: DEBUG nova.virt.hardware [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1183.991310] env[63345]: DEBUG nova.virt.hardware [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 
tempest-ServerActionsTestOtherB-1518567629-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1183.991481] env[63345]: DEBUG nova.virt.hardware [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1183.991645] env[63345]: DEBUG nova.virt.hardware [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1183.991821] env[63345]: DEBUG nova.virt.hardware [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1183.996968] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-788e206c-c24d-4704-aff7-0e08d3effe33 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.011800] env[63345]: DEBUG oslo_vmware.api [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 1184.011800] env[63345]: value = "task-1017983" [ 1184.011800] env[63345]: _type = "Task" [ 1184.011800] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1184.020851] env[63345]: DEBUG oslo_vmware.api [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017983, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.064007] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Releasing lock "refresh_cache-148c961e-d260-4dbd-ad9f-52f94b072096" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1184.064268] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] Updated the network info_cache for instance {{(pid=63345) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10328}} [ 1184.064483] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1184.064644] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1184.065097] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1184.065305] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1184.065464] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1184.065617] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1184.065755] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63345) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10876}} [ 1184.065901] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager.update_available_resource {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1184.458435] env[63345]: DEBUG oslo_concurrency.lockutils [None req-49cda40c-58df-4b95-b620-3a78081ad711 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Lock "937ba0d6-bf23-45ae-8d75-cd7559e436f5" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.294s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1184.521102] env[63345]: DEBUG oslo_vmware.api [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017983, 'name': ReconfigVM_Task, 'duration_secs': 0.157796} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1184.521913] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f92e1d2d-dcfa-4fb0-a051-d7fd48bb4714 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.542302] env[63345]: DEBUG nova.virt.hardware [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1184.542504] env[63345]: DEBUG nova.virt.hardware [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1184.542693] env[63345]: DEBUG nova.virt.hardware [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1184.542908] env[63345]: DEBUG nova.virt.hardware [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1184.543082] env[63345]: DEBUG nova.virt.hardware [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 
tempest-ServerActionsTestOtherB-1518567629-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1184.543240] env[63345]: DEBUG nova.virt.hardware [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1184.543552] env[63345]: DEBUG nova.virt.hardware [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1184.543639] env[63345]: DEBUG nova.virt.hardware [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1184.543773] env[63345]: DEBUG nova.virt.hardware [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1184.543938] env[63345]: DEBUG nova.virt.hardware [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1184.544130] env[63345]: DEBUG nova.virt.hardware [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1184.544904] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c42ef4a4-a00f-4a52-83b9-2f241e5b1148 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.549907] env[63345]: DEBUG oslo_vmware.api [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 1184.549907] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52543119-467e-12dd-b749-19c59aa146a0" [ 1184.549907] env[63345]: _type = "Task" [ 1184.549907] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1184.557204] env[63345]: DEBUG oslo_vmware.api [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52543119-467e-12dd-b749-19c59aa146a0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.570213] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1184.570422] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1184.570591] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1184.570746] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63345) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1184.571532] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d67b230-0147-43ef-b9dd-da35357f3d02 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.578527] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-493c433e-82b7-44ba-99b8-78b7d0766046 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.591619] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-710b5fd0-9f08-4223-9d25-f961b8796455 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.597561] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c5ee652-bdf9-4a14-98bf-af86f0ab1a5d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.626803] env[63345]: INFO nova.compute.manager [None req-9b03ab62-0489-44dc-a1e4-a1732d16d969 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Rescuing [ 1184.627064] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9b03ab62-0489-44dc-a1e4-a1732d16d969 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Acquiring lock "refresh_cache-937ba0d6-bf23-45ae-8d75-cd7559e436f5" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1184.627230] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9b03ab62-0489-44dc-a1e4-a1732d16d969 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Acquired lock "refresh_cache-937ba0d6-bf23-45ae-8d75-cd7559e436f5" {{(pid=63345) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1184.627401] env[63345]: DEBUG nova.network.neutron [None req-9b03ab62-0489-44dc-a1e4-a1732d16d969 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1184.628594] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180568MB free_disk=187GB free_vcpus=48 pci_devices=None {{(pid=63345) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1184.628746] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1184.628941] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1185.059840] env[63345]: DEBUG oslo_vmware.api [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52543119-467e-12dd-b749-19c59aa146a0, 'name': SearchDatastore_Task, 'duration_secs': 0.007415} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1185.065357] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Reconfiguring VM instance instance-00000071 to detach disk 2000 {{(pid=63345) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1185.065654] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8c675065-dab2-4da3-a88b-329bc4731704 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.083231] env[63345]: DEBUG oslo_vmware.api [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 1185.083231] env[63345]: value = "task-1017985" [ 1185.083231] env[63345]: _type = "Task" [ 1185.083231] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1185.091053] env[63345]: DEBUG oslo_vmware.api [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017985, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.348526] env[63345]: DEBUG nova.network.neutron [None req-9b03ab62-0489-44dc-a1e4-a1732d16d969 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Updating instance_info_cache with network_info: [{"id": "4244898c-6ed5-4ae5-9bdb-12a31a9d8a9b", "address": "fa:16:3e:0e:aa:f1", "network": {"id": "04c13a40-3e24-45e3-b045-adb1f5b0ad03", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1754460710-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.175", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a7aaf150ea243b6a38a4b14f265bd4d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3753f451-fa23-4988-9361-074fb0bd3fd4", "external-id": "nsx-vlan-transportzone-440", "segmentation_id": 440, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4244898c-6e", "ovs_interfaceid": "4244898c-6ed5-4ae5-9bdb-12a31a9d8a9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1185.593208] env[63345]: DEBUG oslo_vmware.api [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017985, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.661241] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 148c961e-d260-4dbd-ad9f-52f94b072096 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1185.661479] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1185.661657] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 937ba0d6-bf23-45ae-8d75-cd7559e436f5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1185.661833] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 6b6ce545-0eca-4ef2-a859-c1e8ef978150 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1185.662013] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1185.662286] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=63345) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1185.662489] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1472MB phys_disk=200GB used_disk=5GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=63345) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1185.728268] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c25f070-037a-4d62-bbbb-0a3fbbfb4eff {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.735464] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88a0c0b0-1508-4057-a550-11abf77a2387 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.765358] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3aa879af-d9e5-4706-a29b-c878c21c413b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.772235] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f9abcfa-b703-417c-87b3-74841bcac7f8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.784859] env[63345]: DEBUG nova.compute.provider_tree [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1185.852277] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9b03ab62-0489-44dc-a1e4-a1732d16d969 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] 
Releasing lock "refresh_cache-937ba0d6-bf23-45ae-8d75-cd7559e436f5" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1186.094045] env[63345]: DEBUG oslo_vmware.api [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017985, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.305088] env[63345]: ERROR nova.scheduler.client.report [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [req-a01a92cb-73b4-431f-bffa-867b62433aba] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID fc35ddde-c15e-4ab8-bf77-a06ae0805b57. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-a01a92cb-73b4-431f-bffa-867b62433aba"}]} [ 1186.321055] env[63345]: DEBUG nova.scheduler.client.report [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Refreshing inventories for resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1186.332355] env[63345]: DEBUG nova.scheduler.client.report [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Updating ProviderTree inventory for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1186.332532] env[63345]: DEBUG nova.compute.provider_tree [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1186.342345] env[63345]: DEBUG nova.scheduler.client.report [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Refreshing aggregate associations for resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57, aggregates: None {{(pid=63345) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1186.358031] env[63345]: DEBUG nova.scheduler.client.report [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Refreshing trait associations for resource provider 
fc35ddde-c15e-4ab8-bf77-a06ae0805b57, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=63345) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1186.427355] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4359440b-86d8-4106-bfae-8a3b67537733 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.434215] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4da1a6c-c91f-4314-8519-f186c442d66a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.464240] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3079f8c8-ba88-4357-b5ee-f043989ab570 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.471743] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9841bcff-9a56-413a-adb5-62ceb67bff74 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.486541] env[63345]: DEBUG nova.compute.provider_tree [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1186.594100] env[63345]: DEBUG oslo_vmware.api [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017985, 'name': ReconfigVM_Task, 'duration_secs': 1.212322} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.594486] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Reconfigured VM instance instance-00000071 to detach disk 2000 {{(pid=63345) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1186.596069] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e48ea9ab-498f-49b0-bbda-cfda4cd340ae {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.621366] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Reconfiguring VM instance instance-00000071 to attach disk [datastore2] 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46/5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46.vmdk or device None with type thin {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1186.621702] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-511e320f-88d5-49fd-aecf-1754074d1449 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.641211] env[63345]: DEBUG oslo_vmware.api [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 1186.641211] env[63345]: value = "task-1017987" [ 1186.641211] env[63345]: _type = "Task" [ 1186.641211] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.652202] env[63345]: DEBUG oslo_vmware.api [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017987, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.883800] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b03ab62-0489-44dc-a1e4-a1732d16d969 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1186.884165] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5934beee-6442-4af5-8c49-7e331269405a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.891895] env[63345]: DEBUG oslo_vmware.api [None req-9b03ab62-0489-44dc-a1e4-a1732d16d969 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Waiting for the task: (returnval){ [ 1186.891895] env[63345]: value = "task-1017988" [ 1186.891895] env[63345]: _type = "Task" [ 1186.891895] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.900026] env[63345]: DEBUG oslo_vmware.api [None req-9b03ab62-0489-44dc-a1e4-a1732d16d969 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017988, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.017841] env[63345]: DEBUG nova.scheduler.client.report [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Updated inventory for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with generation 173 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1187.018129] env[63345]: DEBUG nova.compute.provider_tree [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Updating resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 generation from 173 to 174 during operation: update_inventory {{(pid=63345) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1187.018299] env[63345]: DEBUG nova.compute.provider_tree [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 186, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1187.151139] env[63345]: DEBUG oslo_vmware.api [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017987, 'name': ReconfigVM_Task, 'duration_secs': 0.31939} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.151437] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Reconfigured VM instance instance-00000071 to attach disk [datastore2] 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46/5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46.vmdk or device None with type thin {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1187.152264] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9846bfd-00ed-483b-ba4a-ff9eb2dc140d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.173033] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ead81d06-7ce7-4bdb-a95c-5405f6d7b303 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.192887] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16d8524d-a0fb-424c-abed-e76869660c93 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.214280] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42ff4446-0e2f-473c-9a1d-102d8b2fba2e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.221061] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1187.221308] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ed245fb2-ac96-4578-87f7-b0a496738d12 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.227699] env[63345]: DEBUG oslo_vmware.api [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 1187.227699] env[63345]: value = "task-1017989" [ 1187.227699] env[63345]: _type = "Task" [ 1187.227699] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.234538] env[63345]: DEBUG oslo_vmware.api [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017989, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.338284] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-5fe704c7-f4e6-44e8-b493-1f384004be49 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Volume attach. 
Driver type: vmdk {{(pid=63345) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1187.338642] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-5fe704c7-f4e6-44e8-b493-1f384004be49 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-226186', 'volume_id': '8a1a6d39-eb94-46e2-a4c3-0290f64a9afe', 'name': 'volume-8a1a6d39-eb94-46e2-a4c3-0290f64a9afe', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '6b6ce545-0eca-4ef2-a859-c1e8ef978150', 'attached_at': '', 'detached_at': '', 'volume_id': '8a1a6d39-eb94-46e2-a4c3-0290f64a9afe', 'serial': '8a1a6d39-eb94-46e2-a4c3-0290f64a9afe'} {{(pid=63345) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1187.339620] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81e058b0-9ab8-4fb2-8b21-9f5f5786965b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.355768] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fae3f6ce-b533-4df0-936c-bc3456a44ff3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.379278] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-5fe704c7-f4e6-44e8-b493-1f384004be49 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Reconfiguring VM instance instance-00000075 to attach disk [datastore2] volume-8a1a6d39-eb94-46e2-a4c3-0290f64a9afe/volume-8a1a6d39-eb94-46e2-a4c3-0290f64a9afe.vmdk or device None with type thin {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1187.379515] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8d438399-fead-4bba-b4d8-5aef483a3eb0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.396995] env[63345]: DEBUG oslo_vmware.api [None req-5fe704c7-f4e6-44e8-b493-1f384004be49 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Waiting for the task: (returnval){ [ 1187.396995] env[63345]: value = "task-1017990" [ 1187.396995] env[63345]: _type = "Task" [ 1187.396995] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.400041] env[63345]: DEBUG oslo_vmware.api [None req-9b03ab62-0489-44dc-a1e4-a1732d16d969 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017988, 'name': PowerOffVM_Task, 'duration_secs': 0.203558} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.403195] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b03ab62-0489-44dc-a1e4-a1732d16d969 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1187.404018] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8bedc58-b907-4014-b066-c46d884d455b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.427742] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cef1e6a0-d604-44bc-9cf7-3f0c33c279e9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.430525] env[63345]: DEBUG oslo_vmware.api [None req-5fe704c7-f4e6-44e8-b493-1f384004be49 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': task-1017990, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.455534] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b03ab62-0489-44dc-a1e4-a1732d16d969 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1187.455835] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-968ad454-9e34-4173-a866-93638c7fb66b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.462009] env[63345]: DEBUG oslo_vmware.api [None req-9b03ab62-0489-44dc-a1e4-a1732d16d969 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Waiting for the task: (returnval){ [ 1187.462009] env[63345]: value = "task-1017991" [ 1187.462009] env[63345]: _type = "Task" [ 1187.462009] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.469473] env[63345]: DEBUG oslo_vmware.api [None req-9b03ab62-0489-44dc-a1e4-a1732d16d969 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017991, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.523470] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63345) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1187.523673] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.895s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1187.739038] env[63345]: DEBUG oslo_vmware.api [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017989, 'name': PowerOnVM_Task, 'duration_secs': 0.375695} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.739330] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1187.909797] env[63345]: DEBUG oslo_vmware.api [None req-5fe704c7-f4e6-44e8-b493-1f384004be49 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': task-1017990, 'name': ReconfigVM_Task, 'duration_secs': 0.3578} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.910035] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-5fe704c7-f4e6-44e8-b493-1f384004be49 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Reconfigured VM instance instance-00000075 to attach disk [datastore2] volume-8a1a6d39-eb94-46e2-a4c3-0290f64a9afe/volume-8a1a6d39-eb94-46e2-a4c3-0290f64a9afe.vmdk or device None with type thin {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1187.914599] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fa13a5f1-d094-4e82-bc9d-d242c3454773 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.930242] env[63345]: DEBUG oslo_vmware.api [None req-5fe704c7-f4e6-44e8-b493-1f384004be49 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Waiting for the task: (returnval){ [ 1187.930242] env[63345]: value = "task-1017992" [ 1187.930242] env[63345]: _type = "Task" [ 1187.930242] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.937917] env[63345]: DEBUG oslo_vmware.api [None req-5fe704c7-f4e6-44e8-b493-1f384004be49 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': task-1017992, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.972211] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b03ab62-0489-44dc-a1e4-a1732d16d969 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] VM already powered off {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 1187.972448] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-9b03ab62-0489-44dc-a1e4-a1732d16d969 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1187.972741] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9b03ab62-0489-44dc-a1e4-a1732d16d969 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1187.972905] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9b03ab62-0489-44dc-a1e4-a1732d16d969 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1187.973104] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-9b03ab62-0489-44dc-a1e4-a1732d16d969 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1187.973352] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-187ce49c-32cf-493b-89c6-9a8ff136ad12 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.983076] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-9b03ab62-0489-44dc-a1e4-a1732d16d969 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1187.983262] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-9b03ab62-0489-44dc-a1e4-a1732d16d969 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1187.983940] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f22557f5-11a5-4536-9c93-6f4d2abb3eda {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.988500] env[63345]: DEBUG oslo_vmware.api [None req-9b03ab62-0489-44dc-a1e4-a1732d16d969 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Waiting for the task: (returnval){ [ 1187.988500] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52a6bc51-95b8-b44a-6da0-869e7fdab311" [ 1187.988500] env[63345]: _type = "Task" [ 1187.988500] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.995680] env[63345]: DEBUG oslo_vmware.api [None req-9b03ab62-0489-44dc-a1e4-a1732d16d969 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52a6bc51-95b8-b44a-6da0-869e7fdab311, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.442813] env[63345]: DEBUG oslo_vmware.api [None req-5fe704c7-f4e6-44e8-b493-1f384004be49 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': task-1017992, 'name': ReconfigVM_Task, 'duration_secs': 0.148653} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.443254] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-5fe704c7-f4e6-44e8-b493-1f384004be49 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-226186', 'volume_id': '8a1a6d39-eb94-46e2-a4c3-0290f64a9afe', 'name': 'volume-8a1a6d39-eb94-46e2-a4c3-0290f64a9afe', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '6b6ce545-0eca-4ef2-a859-c1e8ef978150', 'attached_at': '', 'detached_at': '', 'volume_id': '8a1a6d39-eb94-46e2-a4c3-0290f64a9afe', 'serial': '8a1a6d39-eb94-46e2-a4c3-0290f64a9afe'} {{(pid=63345) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1188.499171] env[63345]: DEBUG oslo_vmware.api [None req-9b03ab62-0489-44dc-a1e4-a1732d16d969 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52a6bc51-95b8-b44a-6da0-869e7fdab311, 'name': SearchDatastore_Task, 'duration_secs': 0.025708} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.499949] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-71085218-4399-495b-8a99-284f6c1d4a02 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.504638] env[63345]: DEBUG oslo_vmware.api [None req-9b03ab62-0489-44dc-a1e4-a1732d16d969 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Waiting for the task: (returnval){ [ 1188.504638] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52d5de6c-94a9-4f45-ab32-778af5024040" [ 1188.504638] env[63345]: _type = "Task" [ 1188.504638] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.511772] env[63345]: DEBUG oslo_vmware.api [None req-9b03ab62-0489-44dc-a1e4-a1732d16d969 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52d5de6c-94a9-4f45-ab32-778af5024040, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.778350] env[63345]: INFO nova.compute.manager [None req-d450002f-55b8-47be-b52d-dbd55b91e479 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Updating instance to original state: 'active' [ 1189.014808] env[63345]: DEBUG oslo_vmware.api [None req-9b03ab62-0489-44dc-a1e4-a1732d16d969 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52d5de6c-94a9-4f45-ab32-778af5024040, 'name': SearchDatastore_Task, 'duration_secs': 0.010291} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.015106] env[63345]: DEBUG oslo_concurrency.lockutils [None req-9b03ab62-0489-44dc-a1e4-a1732d16d969 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1189.015379] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-9b03ab62-0489-44dc-a1e4-a1732d16d969 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 937ba0d6-bf23-45ae-8d75-cd7559e436f5/2ff49e1b-8f44-4332-bba9-777d55ff62c4-rescue.vmdk. 
{{(pid=63345) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1189.015662] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-14d7a1e4-97f4-4e13-b222-db4ae23c764f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.022733] env[63345]: DEBUG oslo_vmware.api [None req-9b03ab62-0489-44dc-a1e4-a1732d16d969 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Waiting for the task: (returnval){ [ 1189.022733] env[63345]: value = "task-1017994" [ 1189.022733] env[63345]: _type = "Task" [ 1189.022733] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.029914] env[63345]: DEBUG oslo_vmware.api [None req-9b03ab62-0489-44dc-a1e4-a1732d16d969 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017994, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.478632] env[63345]: DEBUG nova.objects.instance [None req-5fe704c7-f4e6-44e8-b493-1f384004be49 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Lazy-loading 'flavor' on Instance uuid 6b6ce545-0eca-4ef2-a859-c1e8ef978150 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1189.532610] env[63345]: DEBUG oslo_vmware.api [None req-9b03ab62-0489-44dc-a1e4-a1732d16d969 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017994, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.443126} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.532901] env[63345]: INFO nova.virt.vmwareapi.ds_util [None req-9b03ab62-0489-44dc-a1e4-a1732d16d969 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 937ba0d6-bf23-45ae-8d75-cd7559e436f5/2ff49e1b-8f44-4332-bba9-777d55ff62c4-rescue.vmdk. 
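The CopyVirtualDisk_Task and ReconfigVM_Task entries above all follow the same wait/poll pattern: the driver submits a vCenter task, then oslo_vmware polls it ("progress is N%") until it reports success or error. Below is a minimal sketch of that loop, with a hypothetical get_task_info callable standing in for the real property lookups; it illustrates the pattern only and is not the oslo.vmware implementation.

    import time

    POLL_INTERVAL = 0.5  # seconds between checks; assumed value, not taken from the log

    class TaskFailed(Exception):
        """Raised when the vCenter task ends in an error state."""

    def wait_for_task(get_task_info, poll_interval=POLL_INTERVAL):
        """Poll a vCenter task until it reaches a terminal state.

        get_task_info is a hypothetical callable returning an object with
        .state ('running', 'success' or 'error') and .error attributes;
        it stands in for the task-info lookups the real library performs.
        """
        while True:
            info = get_task_info()
            if info.state == 'success':
                return info      # the log records duration_secs at this point
            if info.state == 'error':
                raise TaskFailed(info.error)
            # still running/queued: the "progress is N%" lines come from here
            time.sleep(poll_interval)

In the entries above, "Waiting for the task: (returnval){ ... }" marks the submission, each "progress is N%" line corresponds to one iteration of such a loop, and "completed successfully" with a duration_secs value is the terminal success branch.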
[ 1189.533708] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7426278f-c3ea-4e96-b01e-6eb0b0c12c75 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.562705] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-9b03ab62-0489-44dc-a1e4-a1732d16d969 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Reconfiguring VM instance instance-00000073 to attach disk [datastore2] 937ba0d6-bf23-45ae-8d75-cd7559e436f5/2ff49e1b-8f44-4332-bba9-777d55ff62c4-rescue.vmdk or device None with type thin {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1189.563418] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-634be4fb-1d42-432c-8b2f-74e7157c0fb7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.582158] env[63345]: DEBUG oslo_vmware.api [None req-9b03ab62-0489-44dc-a1e4-a1732d16d969 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Waiting for the task: (returnval){ [ 1189.582158] env[63345]: value = "task-1017995" [ 1189.582158] env[63345]: _type = "Task" [ 1189.582158] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.589970] env[63345]: DEBUG oslo_vmware.api [None req-9b03ab62-0489-44dc-a1e4-a1732d16d969 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017995, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.984512] env[63345]: DEBUG oslo_concurrency.lockutils [None req-5fe704c7-f4e6-44e8-b493-1f384004be49 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Lock "6b6ce545-0eca-4ef2-a859-c1e8ef978150" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.239s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1190.092318] env[63345]: DEBUG oslo_vmware.api [None req-9b03ab62-0489-44dc-a1e4-a1732d16d969 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017995, 'name': ReconfigVM_Task, 'duration_secs': 0.341135} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.092613] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-9b03ab62-0489-44dc-a1e4-a1732d16d969 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Reconfigured VM instance instance-00000073 to attach disk [datastore2] 937ba0d6-bf23-45ae-8d75-cd7559e436f5/2ff49e1b-8f44-4332-bba9-777d55ff62c4-rescue.vmdk or device None with type thin {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1190.093541] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-382785e6-efaf-462e-ba92-5ad58714d5d8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.120407] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e1106f7e-cf69-43b9-92eb-2cfda0e0f430 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.134870] env[63345]: DEBUG oslo_vmware.api [None req-9b03ab62-0489-44dc-a1e4-a1732d16d969 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Waiting for the task: (returnval){ [ 1190.134870] env[63345]: value = "task-1017996" [ 1190.134870] env[63345]: _type = "Task" [ 1190.134870] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.146395] env[63345]: DEBUG oslo_vmware.api [None req-9b03ab62-0489-44dc-a1e4-a1732d16d969 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017996, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.243228] env[63345]: DEBUG oslo_concurrency.lockutils [None req-42fb55c9-0383-4fbc-bc18-c20f719a4dcd tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Acquiring lock "5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1190.243514] env[63345]: DEBUG oslo_concurrency.lockutils [None req-42fb55c9-0383-4fbc-bc18-c20f719a4dcd tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lock "5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1190.243745] env[63345]: DEBUG oslo_concurrency.lockutils [None req-42fb55c9-0383-4fbc-bc18-c20f719a4dcd tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Acquiring lock "5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1190.243941] env[63345]: DEBUG oslo_concurrency.lockutils [None req-42fb55c9-0383-4fbc-bc18-c20f719a4dcd tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lock "5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1190.244186] env[63345]: DEBUG oslo_concurrency.lockutils [None req-42fb55c9-0383-4fbc-bc18-c20f719a4dcd tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lock "5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1190.246652] env[63345]: INFO nova.compute.manager [None req-42fb55c9-0383-4fbc-bc18-c20f719a4dcd tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Terminating instance [ 1190.645429] env[63345]: DEBUG oslo_vmware.api [None req-9b03ab62-0489-44dc-a1e4-a1732d16d969 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017996, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.750591] env[63345]: DEBUG nova.compute.manager [None req-42fb55c9-0383-4fbc-bc18-c20f719a4dcd tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Start destroying the instance on the hypervisor. 
{{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 1190.750870] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-42fb55c9-0383-4fbc-bc18-c20f719a4dcd tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1190.751176] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-05a318bd-069f-434d-8dce-d10e664c2d62 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.758908] env[63345]: DEBUG oslo_vmware.api [None req-42fb55c9-0383-4fbc-bc18-c20f719a4dcd tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 1190.758908] env[63345]: value = "task-1017997" [ 1190.758908] env[63345]: _type = "Task" [ 1190.758908] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.766790] env[63345]: DEBUG oslo_vmware.api [None req-42fb55c9-0383-4fbc-bc18-c20f719a4dcd tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017997, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.809779] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0dbd7c2d-3249-40a3-9d3a-5fcee73ad0e1 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Acquiring lock "6b6ce545-0eca-4ef2-a859-c1e8ef978150" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1190.810036] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0dbd7c2d-3249-40a3-9d3a-5fcee73ad0e1 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Lock "6b6ce545-0eca-4ef2-a859-c1e8ef978150" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1191.145152] env[63345]: DEBUG oslo_vmware.api [None req-9b03ab62-0489-44dc-a1e4-a1732d16d969 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017996, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.267881] env[63345]: DEBUG oslo_vmware.api [None req-42fb55c9-0383-4fbc-bc18-c20f719a4dcd tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017997, 'name': PowerOffVM_Task, 'duration_secs': 0.237231} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1191.268200] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-42fb55c9-0383-4fbc-bc18-c20f719a4dcd tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1191.268420] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-42fb55c9-0383-4fbc-bc18-c20f719a4dcd tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Volume detach. Driver type: vmdk {{(pid=63345) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1191.268615] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-42fb55c9-0383-4fbc-bc18-c20f719a4dcd tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-226182', 'volume_id': 'd16e80cf-439a-4033-8575-b9e5efa65dd8', 'name': 'volume-d16e80cf-439a-4033-8575-b9e5efa65dd8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attaching', 'instance': '5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46', 'attached_at': '2024-09-30T09:45:26.000000', 'detached_at': '', 'volume_id': 'd16e80cf-439a-4033-8575-b9e5efa65dd8', 'serial': 'd16e80cf-439a-4033-8575-b9e5efa65dd8'} {{(pid=63345) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1191.269370] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed6a0ad9-d6f7-4474-a602-feef1cd515ff {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.289602] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbbca9c5-08ed-4434-a5c5-c51ad02f143e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.295824] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb786d1e-405d-49f6-82c6-1920a71026d5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.315597] env[63345]: DEBUG nova.compute.utils [None req-0dbd7c2d-3249-40a3-9d3a-5fcee73ad0e1 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1191.317336] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29512530-908f-40a9-b5de-14ef4f65dfd5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.332900] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-42fb55c9-0383-4fbc-bc18-c20f719a4dcd tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] The volume has not been displaced from its original location: [datastore1] volume-d16e80cf-439a-4033-8575-b9e5efa65dd8/volume-d16e80cf-439a-4033-8575-b9e5efa65dd8.vmdk. No consolidation needed. 
{{(pid=63345) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1191.337967] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-42fb55c9-0383-4fbc-bc18-c20f719a4dcd tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Reconfiguring VM instance instance-00000071 to detach disk 2001 {{(pid=63345) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1191.338718] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f7e798b7-a39a-4d83-bcaa-8f02693d52cd {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.355290] env[63345]: DEBUG oslo_vmware.api [None req-42fb55c9-0383-4fbc-bc18-c20f719a4dcd tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 1191.355290] env[63345]: value = "task-1017998" [ 1191.355290] env[63345]: _type = "Task" [ 1191.355290] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.364402] env[63345]: DEBUG oslo_vmware.api [None req-42fb55c9-0383-4fbc-bc18-c20f719a4dcd tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017998, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.646716] env[63345]: DEBUG oslo_vmware.api [None req-9b03ab62-0489-44dc-a1e4-a1732d16d969 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017996, 'name': ReconfigVM_Task, 'duration_secs': 1.166047} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1191.647103] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b03ab62-0489-44dc-a1e4-a1732d16d969 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1191.647188] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bb8c3c22-1ab3-4b56-89b7-2d9d8f6e60a1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.652131] env[63345]: DEBUG oslo_vmware.api [None req-9b03ab62-0489-44dc-a1e4-a1732d16d969 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Waiting for the task: (returnval){ [ 1191.652131] env[63345]: value = "task-1017999" [ 1191.652131] env[63345]: _type = "Task" [ 1191.652131] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.659063] env[63345]: DEBUG oslo_vmware.api [None req-9b03ab62-0489-44dc-a1e4-a1732d16d969 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017999, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.821365] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0dbd7c2d-3249-40a3-9d3a-5fcee73ad0e1 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Lock "6b6ce545-0eca-4ef2-a859-c1e8ef978150" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.011s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1191.864604] env[63345]: DEBUG oslo_vmware.api [None req-42fb55c9-0383-4fbc-bc18-c20f719a4dcd tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1017998, 'name': ReconfigVM_Task, 'duration_secs': 0.220297} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1191.864926] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-42fb55c9-0383-4fbc-bc18-c20f719a4dcd tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Reconfigured VM instance instance-00000071 to detach disk 2001 {{(pid=63345) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1191.869778] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-08519f44-1286-4b45-8dc1-58a0f60a3d37 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.885374] env[63345]: DEBUG oslo_vmware.api [None req-42fb55c9-0383-4fbc-bc18-c20f719a4dcd tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 1191.885374] env[63345]: value = "task-1018000" [ 1191.885374] env[63345]: _type = "Task" [ 1191.885374] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.893752] env[63345]: DEBUG oslo_vmware.api [None req-42fb55c9-0383-4fbc-bc18-c20f719a4dcd tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1018000, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.161902] env[63345]: DEBUG oslo_vmware.api [None req-9b03ab62-0489-44dc-a1e4-a1732d16d969 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1017999, 'name': PowerOnVM_Task, 'duration_secs': 0.399529} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1192.162191] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b03ab62-0489-44dc-a1e4-a1732d16d969 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1192.165052] env[63345]: DEBUG nova.compute.manager [None req-9b03ab62-0489-44dc-a1e4-a1732d16d969 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1192.165816] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32fe2706-4c1b-40e6-8497-d2e6dfa6aac9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.394385] env[63345]: DEBUG oslo_vmware.api [None req-42fb55c9-0383-4fbc-bc18-c20f719a4dcd tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1018000, 'name': ReconfigVM_Task, 'duration_secs': 0.147576} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1192.394694] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-42fb55c9-0383-4fbc-bc18-c20f719a4dcd tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-226182', 'volume_id': 'd16e80cf-439a-4033-8575-b9e5efa65dd8', 'name': 'volume-d16e80cf-439a-4033-8575-b9e5efa65dd8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attaching', 'instance': '5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46', 'attached_at': '2024-09-30T09:45:26.000000', 'detached_at': '', 'volume_id': 'd16e80cf-439a-4033-8575-b9e5efa65dd8', 'serial': 'd16e80cf-439a-4033-8575-b9e5efa65dd8'} {{(pid=63345) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1192.395382] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-42fb55c9-0383-4fbc-bc18-c20f719a4dcd tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1192.396730] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f919ee18-2b4a-4506-b97f-635d7fd13524 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.404721] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-42fb55c9-0383-4fbc-bc18-c20f719a4dcd tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1192.405042] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-de3b5a44-9794-4348-bc6f-51c1363d8a77 {{(pid=63345) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.479361] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-42fb55c9-0383-4fbc-bc18-c20f719a4dcd tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1192.479632] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-42fb55c9-0383-4fbc-bc18-c20f719a4dcd tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1192.479861] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-42fb55c9-0383-4fbc-bc18-c20f719a4dcd tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Deleting the datastore file [datastore2] 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46 {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1192.480494] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-52118ab9-e04b-49ce-ab95-48b971500003 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.486958] env[63345]: DEBUG oslo_vmware.api [None req-42fb55c9-0383-4fbc-bc18-c20f719a4dcd tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 1192.486958] env[63345]: value = "task-1018002" [ 1192.486958] env[63345]: _type = "Task" [ 1192.486958] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1192.494854] env[63345]: DEBUG oslo_vmware.api [None req-42fb55c9-0383-4fbc-bc18-c20f719a4dcd tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1018002, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.885036] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0dbd7c2d-3249-40a3-9d3a-5fcee73ad0e1 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Acquiring lock "6b6ce545-0eca-4ef2-a859-c1e8ef978150" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1192.885390] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0dbd7c2d-3249-40a3-9d3a-5fcee73ad0e1 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Lock "6b6ce545-0eca-4ef2-a859-c1e8ef978150" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1192.885390] env[63345]: INFO nova.compute.manager [None req-0dbd7c2d-3249-40a3-9d3a-5fcee73ad0e1 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Attaching volume 2e1de061-987c-4b81-bc79-4a1a1952cb1a to /dev/sdc [ 1192.920849] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c839b6c-39f1-4499-958e-7026fb12bb8e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.928087] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3d52301-14ac-4350-8c98-5a151814a693 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.942546] env[63345]: DEBUG nova.virt.block_device [None req-0dbd7c2d-3249-40a3-9d3a-5fcee73ad0e1 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Updating existing volume attachment record: d0df36a4-1cf7-43d3-a3b1-8e1056030b66 {{(pid=63345) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1192.996802] env[63345]: DEBUG oslo_vmware.api [None req-42fb55c9-0383-4fbc-bc18-c20f719a4dcd tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1018002, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.157707} completed successfully. 
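The attach path above settles on /dev/sdc for volume 2e1de061-... after falling back to the /dev/sd prefix (the earlier "Using /dev/sd instead of None" entry from get_next_device_name). A simplified sketch of that "next free device name" selection follows; it is illustrative only, not Nova's compute.utils implementation.

import string

def next_device_name(used, prefix="/dev/sd"):
    """Return the first /dev/sdX name not already attached to the instance."""
    taken = {name[-1] for name in used if name.startswith(prefix)}
    for letter in string.ascii_lowercase:
        if letter not in taken:
            return prefix + letter
    raise RuntimeError("no free device names left under %s" % prefix)

# Root disk and one data volume already present -> the next slot is /dev/sdc.
print(next_device_name(["/dev/sda", "/dev/sdb"]))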
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1192.997169] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-42fb55c9-0383-4fbc-bc18-c20f719a4dcd tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1192.997404] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-42fb55c9-0383-4fbc-bc18-c20f719a4dcd tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1192.997655] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-42fb55c9-0383-4fbc-bc18-c20f719a4dcd tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1192.997876] env[63345]: INFO nova.compute.manager [None req-42fb55c9-0383-4fbc-bc18-c20f719a4dcd tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Took 2.25 seconds to destroy the instance on the hypervisor. [ 1192.998241] env[63345]: DEBUG oslo.service.loopingcall [None req-42fb55c9-0383-4fbc-bc18-c20f719a4dcd tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
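The oslo.service loopingcall entry above waits on _deallocate_network_with_retries, i.e. network teardown is retried rather than attempted once. Below is a minimal stdlib sketch of that retry-with-backoff shape; the real code uses oslo.service's looping call and narrows what it retries, and deallocate_network here is a placeholder.

import time

def call_with_retries(fn, attempts=3, delay=1.0, backoff=2.0):
    """Call fn until it succeeds or the attempt budget is spent."""
    for attempt in range(1, attempts + 1):
        try:
            return fn()
        except Exception as exc:  # the real code retries only specific errors
            if attempt == attempts:
                raise
            print("attempt %d failed (%s); retrying in %.1fs" % (attempt, exc, delay))
            time.sleep(delay)
            delay *= backoff

def deallocate_network():
    # Placeholder for the per-instance Neutron port cleanup.
    return "deallocated"

print(call_with_retries(deallocate_network))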
{{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1192.998480] env[63345]: DEBUG nova.compute.manager [-] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 1192.998577] env[63345]: DEBUG nova.network.neutron [-] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1193.590897] env[63345]: DEBUG nova.compute.manager [req-18be0618-828a-4f56-b13e-f9a791a4d276 req-b2d02625-a323-4814-87c6-5a85c2a95171 service nova] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Received event network-vif-deleted-104a12d2-9632-4d24-a0e3-d4b18e907a58 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1193.591131] env[63345]: INFO nova.compute.manager [req-18be0618-828a-4f56-b13e-f9a791a4d276 req-b2d02625-a323-4814-87c6-5a85c2a95171 service nova] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Neutron deleted interface 104a12d2-9632-4d24-a0e3-d4b18e907a58; detaching it from the instance and deleting it from the info cache [ 1193.591318] env[63345]: DEBUG nova.network.neutron [req-18be0618-828a-4f56-b13e-f9a791a4d276 req-b2d02625-a323-4814-87c6-5a85c2a95171 service nova] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1193.896513] env[63345]: INFO nova.compute.manager [None req-dae43263-fadc-4cad-8106-b7e019d4987a tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Unrescuing [ 1193.896874] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dae43263-fadc-4cad-8106-b7e019d4987a tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Acquiring lock "refresh_cache-937ba0d6-bf23-45ae-8d75-cd7559e436f5" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1193.896978] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dae43263-fadc-4cad-8106-b7e019d4987a tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Acquired lock "refresh_cache-937ba0d6-bf23-45ae-8d75-cd7559e436f5" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1193.897179] env[63345]: DEBUG nova.network.neutron [None req-dae43263-fadc-4cad-8106-b7e019d4987a tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1194.064234] env[63345]: DEBUG nova.network.neutron [-] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1194.093695] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1316d12a-9a77-4818-9f49-6403fa4ba524 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.104290] env[63345]: 
DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-951dd138-cb3b-4340-a6ee-f93a835dfa98 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.130709] env[63345]: DEBUG nova.compute.manager [req-18be0618-828a-4f56-b13e-f9a791a4d276 req-b2d02625-a323-4814-87c6-5a85c2a95171 service nova] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Detach interface failed, port_id=104a12d2-9632-4d24-a0e3-d4b18e907a58, reason: Instance 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46 could not be found. {{(pid=63345) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 1194.567656] env[63345]: INFO nova.compute.manager [-] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Took 1.57 seconds to deallocate network for instance. [ 1194.628676] env[63345]: DEBUG nova.network.neutron [None req-dae43263-fadc-4cad-8106-b7e019d4987a tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Updating instance_info_cache with network_info: [{"id": "4244898c-6ed5-4ae5-9bdb-12a31a9d8a9b", "address": "fa:16:3e:0e:aa:f1", "network": {"id": "04c13a40-3e24-45e3-b045-adb1f5b0ad03", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1754460710-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.175", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a7aaf150ea243b6a38a4b14f265bd4d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3753f451-fa23-4988-9361-074fb0bd3fd4", "external-id": "nsx-vlan-transportzone-440", "segmentation_id": 440, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4244898c-6e", "ovs_interfaceid": "4244898c-6ed5-4ae5-9bdb-12a31a9d8a9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1195.132336] env[63345]: DEBUG oslo_concurrency.lockutils [None req-dae43263-fadc-4cad-8106-b7e019d4987a tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Releasing lock "refresh_cache-937ba0d6-bf23-45ae-8d75-cd7559e436f5" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1195.133106] env[63345]: DEBUG nova.objects.instance [None req-dae43263-fadc-4cad-8106-b7e019d4987a tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Lazy-loading 'flavor' on Instance uuid 937ba0d6-bf23-45ae-8d75-cd7559e436f5 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1195.150120] env[63345]: INFO nova.compute.manager [None req-42fb55c9-0383-4fbc-bc18-c20f719a4dcd tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46] Took 0.58 seconds to detach 1 
volumes for instance. [ 1195.640013] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ddba6bb-ee52-4b58-a86b-ab55b92f3254 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.663815] env[63345]: DEBUG oslo_concurrency.lockutils [None req-42fb55c9-0383-4fbc-bc18-c20f719a4dcd tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1195.664093] env[63345]: DEBUG oslo_concurrency.lockutils [None req-42fb55c9-0383-4fbc-bc18-c20f719a4dcd tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1195.664325] env[63345]: DEBUG nova.objects.instance [None req-42fb55c9-0383-4fbc-bc18-c20f719a4dcd tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lazy-loading 'resources' on Instance uuid 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1195.665659] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-dae43263-fadc-4cad-8106-b7e019d4987a tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1195.666153] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6407557e-9d85-453d-b10c-fb643038b542 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.673049] env[63345]: DEBUG oslo_vmware.api [None req-dae43263-fadc-4cad-8106-b7e019d4987a tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Waiting for the task: (returnval){ [ 1195.673049] env[63345]: value = "task-1018005" [ 1195.673049] env[63345]: _type = "Task" [ 1195.673049] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.681146] env[63345]: DEBUG oslo_vmware.api [None req-dae43263-fadc-4cad-8106-b7e019d4987a tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1018005, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.182558] env[63345]: DEBUG oslo_vmware.api [None req-dae43263-fadc-4cad-8106-b7e019d4987a tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1018005, 'name': PowerOffVM_Task, 'duration_secs': 0.2302} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1196.182873] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-dae43263-fadc-4cad-8106-b7e019d4987a tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1196.188116] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-dae43263-fadc-4cad-8106-b7e019d4987a tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Reconfiguring VM instance instance-00000073 to detach disk 2002 {{(pid=63345) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1196.190839] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8907c40c-a469-479d-b97f-35702cfd7bb7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.208189] env[63345]: DEBUG oslo_vmware.api [None req-dae43263-fadc-4cad-8106-b7e019d4987a tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Waiting for the task: (returnval){ [ 1196.208189] env[63345]: value = "task-1018006" [ 1196.208189] env[63345]: _type = "Task" [ 1196.208189] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.217238] env[63345]: DEBUG oslo_vmware.api [None req-dae43263-fadc-4cad-8106-b7e019d4987a tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1018006, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.254489] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90eb4e68-1e2a-4bb6-9cc7-50deecf8dbc2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.261684] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bef981a4-687e-40f1-bca7-6289455aaafd {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.292928] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08243cc3-f9fc-41cc-9338-7b89b4aa559c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.300105] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05a8c350-c24a-48df-9021-c3a6edffd69a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.313303] env[63345]: DEBUG nova.compute.provider_tree [None req-42fb55c9-0383-4fbc-bc18-c20f719a4dcd tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1196.718051] env[63345]: DEBUG oslo_vmware.api [None req-dae43263-fadc-4cad-8106-b7e019d4987a tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1018006, 'name': ReconfigVM_Task, 'duration_secs': 0.254544} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1196.718507] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-dae43263-fadc-4cad-8106-b7e019d4987a tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Reconfigured VM instance instance-00000073 to detach disk 2002 {{(pid=63345) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1196.718600] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-dae43263-fadc-4cad-8106-b7e019d4987a tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1196.718805] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c3b3a1b9-20e4-420f-ab01-95b2630887e5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.725415] env[63345]: DEBUG oslo_vmware.api [None req-dae43263-fadc-4cad-8106-b7e019d4987a tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Waiting for the task: (returnval){ [ 1196.725415] env[63345]: value = "task-1018007" [ 1196.725415] env[63345]: _type = "Task" [ 1196.725415] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.733024] env[63345]: DEBUG oslo_vmware.api [None req-dae43263-fadc-4cad-8106-b7e019d4987a tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1018007, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.847693] env[63345]: DEBUG nova.scheduler.client.report [None req-42fb55c9-0383-4fbc-bc18-c20f719a4dcd tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Updated inventory for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with generation 174 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1196.849018] env[63345]: DEBUG nova.compute.provider_tree [None req-42fb55c9-0383-4fbc-bc18-c20f719a4dcd tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Updating resource provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 generation from 174 to 175 during operation: update_inventory {{(pid=63345) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1196.849018] env[63345]: DEBUG nova.compute.provider_tree [None req-42fb55c9-0383-4fbc-bc18-c20f719a4dcd tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Updating inventory in ProviderTree for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1197.235394] env[63345]: DEBUG oslo_vmware.api [None req-dae43263-fadc-4cad-8106-b7e019d4987a tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1018007, 'name': PowerOnVM_Task, 'duration_secs': 0.375434} completed successfully. 
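The provider inventory updates above carry total, reserved, and allocation_ratio per resource class; schedulable capacity is derived roughly as (total - reserved) * allocation_ratio, with max_unit capping any single allocation. A quick worked check against the numbers logged for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57; the helper is illustrative, not Placement code.

inventory = {
    "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0, "max_unit": 16},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0, "max_unit": 65530},
    "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0, "max_unit": 187},
}

def capacity(inv):
    """Schedulable capacity per resource class: (total - reserved) * ratio."""
    return {rc: int((v["total"] - v["reserved"]) * v["allocation_ratio"])
            for rc, v in inv.items()}

print(capacity(inventory))
# {'VCPU': 192, 'MEMORY_MB': 196078, 'DISK_GB': 400}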
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1197.235765] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-dae43263-fadc-4cad-8106-b7e019d4987a tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1197.235951] env[63345]: DEBUG nova.compute.manager [None req-dae43263-fadc-4cad-8106-b7e019d4987a tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1197.236716] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca8d091a-fc64-438e-b2b1-8f92d70bbc05 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.353647] env[63345]: DEBUG oslo_concurrency.lockutils [None req-42fb55c9-0383-4fbc-bc18-c20f719a4dcd tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.689s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1197.373184] env[63345]: INFO nova.scheduler.client.report [None req-42fb55c9-0383-4fbc-bc18-c20f719a4dcd tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Deleted allocations for instance 5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46 [ 1197.491040] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-0dbd7c2d-3249-40a3-9d3a-5fcee73ad0e1 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Volume attach. 
Driver type: vmdk {{(pid=63345) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1197.491312] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-0dbd7c2d-3249-40a3-9d3a-5fcee73ad0e1 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-226187', 'volume_id': '2e1de061-987c-4b81-bc79-4a1a1952cb1a', 'name': 'volume-2e1de061-987c-4b81-bc79-4a1a1952cb1a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '6b6ce545-0eca-4ef2-a859-c1e8ef978150', 'attached_at': '', 'detached_at': '', 'volume_id': '2e1de061-987c-4b81-bc79-4a1a1952cb1a', 'serial': '2e1de061-987c-4b81-bc79-4a1a1952cb1a'} {{(pid=63345) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1197.492210] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd7efb9c-902d-412d-92ef-0c6832019777 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.508277] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a90d939-6b91-45e5-98a6-e7c068ff2ff8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.533981] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-0dbd7c2d-3249-40a3-9d3a-5fcee73ad0e1 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Reconfiguring VM instance instance-00000075 to attach disk [datastore2] volume-2e1de061-987c-4b81-bc79-4a1a1952cb1a/volume-2e1de061-987c-4b81-bc79-4a1a1952cb1a.vmdk or device None with type thin {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1197.534248] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dd6c6c1c-eabf-46b5-b62e-553edd3abe1c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.551448] env[63345]: DEBUG oslo_vmware.api [None req-0dbd7c2d-3249-40a3-9d3a-5fcee73ad0e1 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Waiting for the task: (returnval){ [ 1197.551448] env[63345]: value = "task-1018008" [ 1197.551448] env[63345]: _type = "Task" [ 1197.551448] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1197.558496] env[63345]: DEBUG oslo_vmware.api [None req-0dbd7c2d-3249-40a3-9d3a-5fcee73ad0e1 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': task-1018008, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.880800] env[63345]: DEBUG oslo_concurrency.lockutils [None req-42fb55c9-0383-4fbc-bc18-c20f719a4dcd tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lock "5a53e0f1-82a2-4a8f-b227-bc3dfda5aa46" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.637s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1198.062985] env[63345]: DEBUG oslo_vmware.api [None req-0dbd7c2d-3249-40a3-9d3a-5fcee73ad0e1 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': task-1018008, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.562132] env[63345]: DEBUG oslo_vmware.api [None req-0dbd7c2d-3249-40a3-9d3a-5fcee73ad0e1 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': task-1018008, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.597119] env[63345]: DEBUG nova.compute.manager [req-298cb58c-ca98-469a-abe8-c3b7b55ca7d8 req-de0d4a9b-84cf-4047-9504-120b03ed68fd service nova] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Received event network-changed-4244898c-6ed5-4ae5-9bdb-12a31a9d8a9b {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1198.597119] env[63345]: DEBUG nova.compute.manager [req-298cb58c-ca98-469a-abe8-c3b7b55ca7d8 req-de0d4a9b-84cf-4047-9504-120b03ed68fd service nova] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Refreshing instance network info cache due to event network-changed-4244898c-6ed5-4ae5-9bdb-12a31a9d8a9b. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 1198.597393] env[63345]: DEBUG oslo_concurrency.lockutils [req-298cb58c-ca98-469a-abe8-c3b7b55ca7d8 req-de0d4a9b-84cf-4047-9504-120b03ed68fd service nova] Acquiring lock "refresh_cache-937ba0d6-bf23-45ae-8d75-cd7559e436f5" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1198.597551] env[63345]: DEBUG oslo_concurrency.lockutils [req-298cb58c-ca98-469a-abe8-c3b7b55ca7d8 req-de0d4a9b-84cf-4047-9504-120b03ed68fd service nova] Acquired lock "refresh_cache-937ba0d6-bf23-45ae-8d75-cd7559e436f5" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1198.597723] env[63345]: DEBUG nova.network.neutron [req-298cb58c-ca98-469a-abe8-c3b7b55ca7d8 req-de0d4a9b-84cf-4047-9504-120b03ed68fd service nova] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Refreshing network info cache for port 4244898c-6ed5-4ae5-9bdb-12a31a9d8a9b {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1198.862723] env[63345]: DEBUG oslo_concurrency.lockutils [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Acquiring lock "53f759e4-0398-40ef-823a-3028d1ac82b1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1198.862968] env[63345]: DEBUG oslo_concurrency.lockutils [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lock "53f759e4-0398-40ef-823a-3028d1ac82b1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1199.063022] env[63345]: DEBUG oslo_vmware.api [None req-0dbd7c2d-3249-40a3-9d3a-5fcee73ad0e1 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': task-1018008, 'name': ReconfigVM_Task, 'duration_secs': 1.360549} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1199.063350] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-0dbd7c2d-3249-40a3-9d3a-5fcee73ad0e1 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Reconfigured VM instance instance-00000075 to attach disk [datastore2] volume-2e1de061-987c-4b81-bc79-4a1a1952cb1a/volume-2e1de061-987c-4b81-bc79-4a1a1952cb1a.vmdk or device None with type thin {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1199.068107] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-08d903ad-27a9-42a6-a630-8f2b94e274a6 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.083052] env[63345]: DEBUG oslo_vmware.api [None req-0dbd7c2d-3249-40a3-9d3a-5fcee73ad0e1 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Waiting for the task: (returnval){ [ 1199.083052] env[63345]: value = "task-1018009" [ 1199.083052] env[63345]: _type = "Task" [ 1199.083052] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1199.090600] env[63345]: DEBUG oslo_vmware.api [None req-0dbd7c2d-3249-40a3-9d3a-5fcee73ad0e1 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': task-1018009, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.318454] env[63345]: DEBUG nova.network.neutron [req-298cb58c-ca98-469a-abe8-c3b7b55ca7d8 req-de0d4a9b-84cf-4047-9504-120b03ed68fd service nova] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Updated VIF entry in instance network info cache for port 4244898c-6ed5-4ae5-9bdb-12a31a9d8a9b. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1199.318927] env[63345]: DEBUG nova.network.neutron [req-298cb58c-ca98-469a-abe8-c3b7b55ca7d8 req-de0d4a9b-84cf-4047-9504-120b03ed68fd service nova] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Updating instance_info_cache with network_info: [{"id": "4244898c-6ed5-4ae5-9bdb-12a31a9d8a9b", "address": "fa:16:3e:0e:aa:f1", "network": {"id": "04c13a40-3e24-45e3-b045-adb1f5b0ad03", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1754460710-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.175", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a7aaf150ea243b6a38a4b14f265bd4d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3753f451-fa23-4988-9361-074fb0bd3fd4", "external-id": "nsx-vlan-transportzone-440", "segmentation_id": 440, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4244898c-6e", "ovs_interfaceid": "4244898c-6ed5-4ae5-9bdb-12a31a9d8a9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1199.364974] env[63345]: DEBUG nova.compute.manager [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Starting instance... {{(pid=63345) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 1199.592073] env[63345]: DEBUG oslo_vmware.api [None req-0dbd7c2d-3249-40a3-9d3a-5fcee73ad0e1 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': task-1018009, 'name': ReconfigVM_Task, 'duration_secs': 0.181172} completed successfully. 
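The refreshed instance_info_cache entry above nests the port's fixed and floating addresses inside per-subnet dicts. The sketch below walks that structure to list addresses per VIF; the shape is taken from the cache entry logged for port 4244898c-6e..., trimmed to the relevant keys, and the helper name is mine.

def list_addresses(network_info):
    """Yield (port_id, fixed_ip, floating_ips) tuples from a network_info list."""
    for vif in network_info:
        for subnet in vif["network"]["subnets"]:
            for ip in subnet["ips"]:
                floats = [f["address"] for f in ip.get("floating_ips", [])]
                yield vif["id"], ip["address"], floats

network_info = [{
    "id": "4244898c-6ed5-4ae5-9bdb-12a31a9d8a9b",
    "network": {"subnets": [{"ips": [{"address": "192.168.128.9",
                                      "floating_ips": [{"address": "10.180.180.175"}]}]}]},
}]
for port, fixed, floating in list_addresses(network_info):
    print(port, fixed, floating)   # ... 192.168.128.9 ['10.180.180.175']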
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1199.592428] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-0dbd7c2d-3249-40a3-9d3a-5fcee73ad0e1 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-226187', 'volume_id': '2e1de061-987c-4b81-bc79-4a1a1952cb1a', 'name': 'volume-2e1de061-987c-4b81-bc79-4a1a1952cb1a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '6b6ce545-0eca-4ef2-a859-c1e8ef978150', 'attached_at': '', 'detached_at': '', 'volume_id': '2e1de061-987c-4b81-bc79-4a1a1952cb1a', 'serial': '2e1de061-987c-4b81-bc79-4a1a1952cb1a'} {{(pid=63345) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1199.822143] env[63345]: DEBUG oslo_concurrency.lockutils [req-298cb58c-ca98-469a-abe8-c3b7b55ca7d8 req-de0d4a9b-84cf-4047-9504-120b03ed68fd service nova] Releasing lock "refresh_cache-937ba0d6-bf23-45ae-8d75-cd7559e436f5" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1199.886644] env[63345]: DEBUG oslo_concurrency.lockutils [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1199.886908] env[63345]: DEBUG oslo_concurrency.lockutils [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1199.890036] env[63345]: INFO nova.compute.claims [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1200.620932] env[63345]: DEBUG nova.compute.manager [req-505bb37b-32a8-4a04-90f0-361ff370a6bd req-6f98c56e-370d-476b-93da-3a7e893a054c service nova] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Received event network-changed-4244898c-6ed5-4ae5-9bdb-12a31a9d8a9b {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1200.621164] env[63345]: DEBUG nova.compute.manager [req-505bb37b-32a8-4a04-90f0-361ff370a6bd req-6f98c56e-370d-476b-93da-3a7e893a054c service nova] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Refreshing instance network info cache due to event network-changed-4244898c-6ed5-4ae5-9bdb-12a31a9d8a9b. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 1200.621384] env[63345]: DEBUG oslo_concurrency.lockutils [req-505bb37b-32a8-4a04-90f0-361ff370a6bd req-6f98c56e-370d-476b-93da-3a7e893a054c service nova] Acquiring lock "refresh_cache-937ba0d6-bf23-45ae-8d75-cd7559e436f5" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1200.621536] env[63345]: DEBUG oslo_concurrency.lockutils [req-505bb37b-32a8-4a04-90f0-361ff370a6bd req-6f98c56e-370d-476b-93da-3a7e893a054c service nova] Acquired lock "refresh_cache-937ba0d6-bf23-45ae-8d75-cd7559e436f5" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1200.621705] env[63345]: DEBUG nova.network.neutron [req-505bb37b-32a8-4a04-90f0-361ff370a6bd req-6f98c56e-370d-476b-93da-3a7e893a054c service nova] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Refreshing network info cache for port 4244898c-6ed5-4ae5-9bdb-12a31a9d8a9b {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1200.628063] env[63345]: DEBUG nova.objects.instance [None req-0dbd7c2d-3249-40a3-9d3a-5fcee73ad0e1 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Lazy-loading 'flavor' on Instance uuid 6b6ce545-0eca-4ef2-a859-c1e8ef978150 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1200.961228] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae4f23f5-0cfc-48f5-a8ca-13820cc29202 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.968568] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-264a048b-77ec-4d4f-bd4c-25baea36a726 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.997713] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ace3f54-5b96-4d3a-b917-4ceee9ecf56d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.004998] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5c9ec15-0d15-46dc-8f48-a49f03560736 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.018846] env[63345]: DEBUG nova.compute.provider_tree [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1201.133318] env[63345]: DEBUG oslo_concurrency.lockutils [None req-0dbd7c2d-3249-40a3-9d3a-5fcee73ad0e1 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Lock "6b6ce545-0eca-4ef2-a859-c1e8ef978150" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.248s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1201.335958] env[63345]: DEBUG nova.network.neutron [req-505bb37b-32a8-4a04-90f0-361ff370a6bd req-6f98c56e-370d-476b-93da-3a7e893a054c service nova] [instance: 
937ba0d6-bf23-45ae-8d75-cd7559e436f5] Updated VIF entry in instance network info cache for port 4244898c-6ed5-4ae5-9bdb-12a31a9d8a9b. {{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1201.336367] env[63345]: DEBUG nova.network.neutron [req-505bb37b-32a8-4a04-90f0-361ff370a6bd req-6f98c56e-370d-476b-93da-3a7e893a054c service nova] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Updating instance_info_cache with network_info: [{"id": "4244898c-6ed5-4ae5-9bdb-12a31a9d8a9b", "address": "fa:16:3e:0e:aa:f1", "network": {"id": "04c13a40-3e24-45e3-b045-adb1f5b0ad03", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1754460710-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.175", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a7aaf150ea243b6a38a4b14f265bd4d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3753f451-fa23-4988-9361-074fb0bd3fd4", "external-id": "nsx-vlan-transportzone-440", "segmentation_id": 440, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4244898c-6e", "ovs_interfaceid": "4244898c-6ed5-4ae5-9bdb-12a31a9d8a9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1201.522103] env[63345]: DEBUG nova.scheduler.client.report [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1201.578383] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7cdd983e-f1aa-4448-bf29-e7f01d0a3fc5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Acquiring lock "6b6ce545-0eca-4ef2-a859-c1e8ef978150" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1201.578634] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7cdd983e-f1aa-4448-bf29-e7f01d0a3fc5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Lock "6b6ce545-0eca-4ef2-a859-c1e8ef978150" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1201.840020] env[63345]: DEBUG oslo_concurrency.lockutils 
[req-505bb37b-32a8-4a04-90f0-361ff370a6bd req-6f98c56e-370d-476b-93da-3a7e893a054c service nova] Releasing lock "refresh_cache-937ba0d6-bf23-45ae-8d75-cd7559e436f5" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1202.027950] env[63345]: DEBUG oslo_concurrency.lockutils [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.141s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1202.028540] env[63345]: DEBUG nova.compute.manager [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Start building networks asynchronously for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 1202.081507] env[63345]: INFO nova.compute.manager [None req-7cdd983e-f1aa-4448-bf29-e7f01d0a3fc5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Detaching volume 8a1a6d39-eb94-46e2-a4c3-0290f64a9afe [ 1202.116096] env[63345]: INFO nova.virt.block_device [None req-7cdd983e-f1aa-4448-bf29-e7f01d0a3fc5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Attempting to driver detach volume 8a1a6d39-eb94-46e2-a4c3-0290f64a9afe from mountpoint /dev/sdb [ 1202.116366] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-7cdd983e-f1aa-4448-bf29-e7f01d0a3fc5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Volume detach. 
Driver type: vmdk {{(pid=63345) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1202.116563] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-7cdd983e-f1aa-4448-bf29-e7f01d0a3fc5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-226186', 'volume_id': '8a1a6d39-eb94-46e2-a4c3-0290f64a9afe', 'name': 'volume-8a1a6d39-eb94-46e2-a4c3-0290f64a9afe', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '6b6ce545-0eca-4ef2-a859-c1e8ef978150', 'attached_at': '', 'detached_at': '', 'volume_id': '8a1a6d39-eb94-46e2-a4c3-0290f64a9afe', 'serial': '8a1a6d39-eb94-46e2-a4c3-0290f64a9afe'} {{(pid=63345) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1202.117482] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d5387b1-ca2a-4c18-89d4-ac5e3df9dd90 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.141209] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a4f88bf-ca71-4d37-a50e-2c78ef80afad {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.147817] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c90ab77e-4d62-4995-b024-21e56c27a91d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.170427] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-242e033b-0689-4ebe-9733-4c062c6bab8b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.184350] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-7cdd983e-f1aa-4448-bf29-e7f01d0a3fc5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] The volume has not been displaced from its original location: [datastore2] volume-8a1a6d39-eb94-46e2-a4c3-0290f64a9afe/volume-8a1a6d39-eb94-46e2-a4c3-0290f64a9afe.vmdk. No consolidation needed. 
{{(pid=63345) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1202.189459] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-7cdd983e-f1aa-4448-bf29-e7f01d0a3fc5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Reconfiguring VM instance instance-00000075 to detach disk 2001 {{(pid=63345) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1202.189705] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e928cd99-5e86-4e64-9080-d0e5ed47e249 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.208694] env[63345]: DEBUG oslo_vmware.api [None req-7cdd983e-f1aa-4448-bf29-e7f01d0a3fc5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Waiting for the task: (returnval){ [ 1202.208694] env[63345]: value = "task-1018010" [ 1202.208694] env[63345]: _type = "Task" [ 1202.208694] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1202.217132] env[63345]: DEBUG oslo_vmware.api [None req-7cdd983e-f1aa-4448-bf29-e7f01d0a3fc5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': task-1018010, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.533881] env[63345]: DEBUG nova.compute.utils [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Using /dev/sd instead of None {{(pid=63345) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1202.535540] env[63345]: DEBUG nova.compute.manager [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Allocating IP information in the background. 
{{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1202.535753] env[63345]: DEBUG nova.network.neutron [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] allocate_for_instance() {{(pid=63345) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1202.573752] env[63345]: DEBUG nova.policy [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b3754c2317404a48a80cfee69f1044ee', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '57e386920081487583ea143003aca8c4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63345) authorize /opt/stack/nova/nova/policy.py:201}} [ 1202.718108] env[63345]: DEBUG oslo_vmware.api [None req-7cdd983e-f1aa-4448-bf29-e7f01d0a3fc5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': task-1018010, 'name': ReconfigVM_Task, 'duration_secs': 0.244367} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1202.718375] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-7cdd983e-f1aa-4448-bf29-e7f01d0a3fc5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Reconfigured VM instance instance-00000075 to detach disk 2001 {{(pid=63345) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1202.722989] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-10591719-7d9b-4ab9-8011-069547da2d4a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.737707] env[63345]: DEBUG oslo_vmware.api [None req-7cdd983e-f1aa-4448-bf29-e7f01d0a3fc5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Waiting for the task: (returnval){ [ 1202.737707] env[63345]: value = "task-1018011" [ 1202.737707] env[63345]: _type = "Task" [ 1202.737707] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1202.745085] env[63345]: DEBUG oslo_vmware.api [None req-7cdd983e-f1aa-4448-bf29-e7f01d0a3fc5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': task-1018011, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.839671] env[63345]: DEBUG nova.network.neutron [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Successfully created port: 4b3221f1-3c39-4726-b760-339e16d0d89e {{(pid=63345) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1203.039952] env[63345]: DEBUG nova.compute.manager [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Start building block device mappings for instance. {{(pid=63345) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 1203.247313] env[63345]: DEBUG oslo_vmware.api [None req-7cdd983e-f1aa-4448-bf29-e7f01d0a3fc5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': task-1018011, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.748916] env[63345]: DEBUG oslo_vmware.api [None req-7cdd983e-f1aa-4448-bf29-e7f01d0a3fc5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': task-1018011, 'name': ReconfigVM_Task, 'duration_secs': 0.787753} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1203.749796] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-7cdd983e-f1aa-4448-bf29-e7f01d0a3fc5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-226186', 'volume_id': '8a1a6d39-eb94-46e2-a4c3-0290f64a9afe', 'name': 'volume-8a1a6d39-eb94-46e2-a4c3-0290f64a9afe', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '6b6ce545-0eca-4ef2-a859-c1e8ef978150', 'attached_at': '', 'detached_at': '', 'volume_id': '8a1a6d39-eb94-46e2-a4c3-0290f64a9afe', 'serial': '8a1a6d39-eb94-46e2-a4c3-0290f64a9afe'} {{(pid=63345) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1204.050065] env[63345]: DEBUG nova.compute.manager [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Start spawning the instance on the hypervisor. 
{{(pid=63345) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 1204.074527] env[63345]: DEBUG nova.virt.hardware [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T09:32:20Z,direct_url=,disk_format='vmdk',id=2ff49e1b-8f44-4332-bba9-777d55ff62c4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5dc99cc64e6c4d83928b309253a8df8d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T09:32:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1204.074809] env[63345]: DEBUG nova.virt.hardware [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1204.074999] env[63345]: DEBUG nova.virt.hardware [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Image limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1204.075229] env[63345]: DEBUG nova.virt.hardware [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1204.075388] env[63345]: DEBUG nova.virt.hardware [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1204.075559] env[63345]: DEBUG nova.virt.hardware [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1204.075754] env[63345]: DEBUG nova.virt.hardware [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1204.075948] env[63345]: DEBUG nova.virt.hardware [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1204.076180] env[63345]: DEBUG 
nova.virt.hardware [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1204.076359] env[63345]: DEBUG nova.virt.hardware [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1204.076543] env[63345]: DEBUG nova.virt.hardware [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1204.077473] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf283339-9f40-4a0f-a44b-c553c1672b54 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.086366] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d9c608e-4f9e-47fb-af62-becc7c36879f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.184570] env[63345]: DEBUG nova.compute.manager [req-80d033c4-fbc7-4acc-920c-b29d9948684e req-c0697bc8-23b3-436f-b6fa-3b3083b1dac6 service nova] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Received event network-vif-plugged-4b3221f1-3c39-4726-b760-339e16d0d89e {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1204.184807] env[63345]: DEBUG oslo_concurrency.lockutils [req-80d033c4-fbc7-4acc-920c-b29d9948684e req-c0697bc8-23b3-436f-b6fa-3b3083b1dac6 service nova] Acquiring lock "53f759e4-0398-40ef-823a-3028d1ac82b1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1204.185112] env[63345]: DEBUG oslo_concurrency.lockutils [req-80d033c4-fbc7-4acc-920c-b29d9948684e req-c0697bc8-23b3-436f-b6fa-3b3083b1dac6 service nova] Lock "53f759e4-0398-40ef-823a-3028d1ac82b1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1204.185245] env[63345]: DEBUG oslo_concurrency.lockutils [req-80d033c4-fbc7-4acc-920c-b29d9948684e req-c0697bc8-23b3-436f-b6fa-3b3083b1dac6 service nova] Lock "53f759e4-0398-40ef-823a-3028d1ac82b1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1204.185423] env[63345]: DEBUG nova.compute.manager [req-80d033c4-fbc7-4acc-920c-b29d9948684e req-c0697bc8-23b3-436f-b6fa-3b3083b1dac6 service nova] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] No waiting events found dispatching network-vif-plugged-4b3221f1-3c39-4726-b760-339e16d0d89e {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1204.185595] env[63345]: WARNING nova.compute.manager 
[req-80d033c4-fbc7-4acc-920c-b29d9948684e req-c0697bc8-23b3-436f-b6fa-3b3083b1dac6 service nova] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Received unexpected event network-vif-plugged-4b3221f1-3c39-4726-b760-339e16d0d89e for instance with vm_state building and task_state spawning. [ 1204.290650] env[63345]: DEBUG nova.objects.instance [None req-7cdd983e-f1aa-4448-bf29-e7f01d0a3fc5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Lazy-loading 'flavor' on Instance uuid 6b6ce545-0eca-4ef2-a859-c1e8ef978150 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1204.747813] env[63345]: DEBUG nova.network.neutron [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Successfully updated port: 4b3221f1-3c39-4726-b760-339e16d0d89e {{(pid=63345) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1204.772906] env[63345]: DEBUG nova.compute.manager [req-b5724f08-7f17-4f25-bb46-9318bcc17a70 req-5f248f7f-29d9-4648-856e-3d16bbe08617 service nova] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Received event network-changed-4b3221f1-3c39-4726-b760-339e16d0d89e {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1204.773102] env[63345]: DEBUG nova.compute.manager [req-b5724f08-7f17-4f25-bb46-9318bcc17a70 req-5f248f7f-29d9-4648-856e-3d16bbe08617 service nova] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Refreshing instance network info cache due to event network-changed-4b3221f1-3c39-4726-b760-339e16d0d89e. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 1204.773318] env[63345]: DEBUG oslo_concurrency.lockutils [req-b5724f08-7f17-4f25-bb46-9318bcc17a70 req-5f248f7f-29d9-4648-856e-3d16bbe08617 service nova] Acquiring lock "refresh_cache-53f759e4-0398-40ef-823a-3028d1ac82b1" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1204.773463] env[63345]: DEBUG oslo_concurrency.lockutils [req-b5724f08-7f17-4f25-bb46-9318bcc17a70 req-5f248f7f-29d9-4648-856e-3d16bbe08617 service nova] Acquired lock "refresh_cache-53f759e4-0398-40ef-823a-3028d1ac82b1" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1204.773631] env[63345]: DEBUG nova.network.neutron [req-b5724f08-7f17-4f25-bb46-9318bcc17a70 req-5f248f7f-29d9-4648-856e-3d16bbe08617 service nova] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Refreshing network info cache for port 4b3221f1-3c39-4726-b760-339e16d0d89e {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1204.793587] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1204.795412] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1205.251342] env[63345]: DEBUG oslo_concurrency.lockutils [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 
tempest-ServerActionsTestOtherB-1518567629-project-member] Acquiring lock "refresh_cache-53f759e4-0398-40ef-823a-3028d1ac82b1" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1205.298233] env[63345]: DEBUG oslo_concurrency.lockutils [None req-7cdd983e-f1aa-4448-bf29-e7f01d0a3fc5 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Lock "6b6ce545-0eca-4ef2-a859-c1e8ef978150" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.719s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1205.303884] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1205.304203] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Starting heal instance info cache {{(pid=63345) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10257}} [ 1205.306584] env[63345]: DEBUG nova.network.neutron [req-b5724f08-7f17-4f25-bb46-9318bcc17a70 req-5f248f7f-29d9-4648-856e-3d16bbe08617 service nova] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1205.386740] env[63345]: DEBUG nova.network.neutron [req-b5724f08-7f17-4f25-bb46-9318bcc17a70 req-5f248f7f-29d9-4648-856e-3d16bbe08617 service nova] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1205.839980] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Acquiring lock "refresh_cache-83ef21e9-62eb-4f0d-9c0c-a038743e0dd8" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1205.840160] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Acquired lock "refresh_cache-83ef21e9-62eb-4f0d-9c0c-a038743e0dd8" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1205.840314] env[63345]: DEBUG nova.network.neutron [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8] Forcefully refreshing network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2004}} [ 1205.843294] env[63345]: DEBUG oslo_concurrency.lockutils [None req-e61b4a6f-5014-4bb9-bcfd-6f64800b9876 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Acquiring lock "6b6ce545-0eca-4ef2-a859-c1e8ef978150" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1205.843523] env[63345]: DEBUG oslo_concurrency.lockutils [None req-e61b4a6f-5014-4bb9-bcfd-6f64800b9876 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Lock "6b6ce545-0eca-4ef2-a859-c1e8ef978150" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s 
{{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1205.889252] env[63345]: DEBUG oslo_concurrency.lockutils [req-b5724f08-7f17-4f25-bb46-9318bcc17a70 req-5f248f7f-29d9-4648-856e-3d16bbe08617 service nova] Releasing lock "refresh_cache-53f759e4-0398-40ef-823a-3028d1ac82b1" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1205.889631] env[63345]: DEBUG oslo_concurrency.lockutils [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Acquired lock "refresh_cache-53f759e4-0398-40ef-823a-3028d1ac82b1" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1205.889797] env[63345]: DEBUG nova.network.neutron [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1206.347323] env[63345]: INFO nova.compute.manager [None req-e61b4a6f-5014-4bb9-bcfd-6f64800b9876 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Detaching volume 2e1de061-987c-4b81-bc79-4a1a1952cb1a [ 1206.375366] env[63345]: INFO nova.virt.block_device [None req-e61b4a6f-5014-4bb9-bcfd-6f64800b9876 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Attempting to driver detach volume 2e1de061-987c-4b81-bc79-4a1a1952cb1a from mountpoint /dev/sdc [ 1206.375604] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-e61b4a6f-5014-4bb9-bcfd-6f64800b9876 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Volume detach. 
Driver type: vmdk {{(pid=63345) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1206.375791] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-e61b4a6f-5014-4bb9-bcfd-6f64800b9876 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-226187', 'volume_id': '2e1de061-987c-4b81-bc79-4a1a1952cb1a', 'name': 'volume-2e1de061-987c-4b81-bc79-4a1a1952cb1a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '6b6ce545-0eca-4ef2-a859-c1e8ef978150', 'attached_at': '', 'detached_at': '', 'volume_id': '2e1de061-987c-4b81-bc79-4a1a1952cb1a', 'serial': '2e1de061-987c-4b81-bc79-4a1a1952cb1a'} {{(pid=63345) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1206.376752] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9df1cbbf-14ea-437a-8998-611602cdc6b5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.401823] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc231859-fffd-424d-8f88-350bf6cff65f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.409857] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fc05118-3286-40f6-93fa-6e31ab5be4e9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.430134] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be51c97a-2016-49b2-8bfb-6333c9b4d564 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.433205] env[63345]: DEBUG nova.network.neutron [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Instance cache missing network info. {{(pid=63345) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1206.446906] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-e61b4a6f-5014-4bb9-bcfd-6f64800b9876 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] The volume has not been displaced from its original location: [datastore2] volume-2e1de061-987c-4b81-bc79-4a1a1952cb1a/volume-2e1de061-987c-4b81-bc79-4a1a1952cb1a.vmdk. No consolidation needed. 
{{(pid=63345) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1206.452358] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-e61b4a6f-5014-4bb9-bcfd-6f64800b9876 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Reconfiguring VM instance instance-00000075 to detach disk 2002 {{(pid=63345) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1206.454588] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5dc58d22-d630-4b21-95c6-f598626f165a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.471942] env[63345]: DEBUG oslo_vmware.api [None req-e61b4a6f-5014-4bb9-bcfd-6f64800b9876 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Waiting for the task: (returnval){ [ 1206.471942] env[63345]: value = "task-1018012" [ 1206.471942] env[63345]: _type = "Task" [ 1206.471942] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1206.479439] env[63345]: DEBUG oslo_vmware.api [None req-e61b4a6f-5014-4bb9-bcfd-6f64800b9876 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': task-1018012, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.580556] env[63345]: DEBUG nova.network.neutron [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Updating instance_info_cache with network_info: [{"id": "4b3221f1-3c39-4726-b760-339e16d0d89e", "address": "fa:16:3e:74:52:98", "network": {"id": "dffa0b34-9323-42eb-aeb1-e32aebcb75c8", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1826417035-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "57e386920081487583ea143003aca8c4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "94e1d797-8eb2-4400-9f7d-f2eb60eb4cf2", "external-id": "nsx-vlan-transportzone-828", "segmentation_id": 828, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b3221f1-3c", "ovs_interfaceid": "4b3221f1-3c39-4726-b760-339e16d0d89e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1206.981212] env[63345]: DEBUG oslo_vmware.api [None req-e61b4a6f-5014-4bb9-bcfd-6f64800b9876 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': task-1018012, 'name': ReconfigVM_Task, 'duration_secs': 0.21468} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1206.981474] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-e61b4a6f-5014-4bb9-bcfd-6f64800b9876 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Reconfigured VM instance instance-00000075 to detach disk 2002 {{(pid=63345) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1206.986128] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4ed0b965-773c-49fe-abbe-2f34418af50f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.000279] env[63345]: DEBUG oslo_vmware.api [None req-e61b4a6f-5014-4bb9-bcfd-6f64800b9876 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Waiting for the task: (returnval){ [ 1207.000279] env[63345]: value = "task-1018013" [ 1207.000279] env[63345]: _type = "Task" [ 1207.000279] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1207.007808] env[63345]: DEBUG oslo_vmware.api [None req-e61b4a6f-5014-4bb9-bcfd-6f64800b9876 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': task-1018013, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1207.029586] env[63345]: DEBUG nova.network.neutron [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8] Updating instance_info_cache with network_info: [{"id": "24c75109-1060-4770-8c15-3bce8002f3e0", "address": "fa:16:3e:56:92:00", "network": {"id": "04c13a40-3e24-45e3-b045-adb1f5b0ad03", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1754460710-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a7aaf150ea243b6a38a4b14f265bd4d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3753f451-fa23-4988-9361-074fb0bd3fd4", "external-id": "nsx-vlan-transportzone-440", "segmentation_id": 440, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24c75109-10", "ovs_interfaceid": "24c75109-1060-4770-8c15-3bce8002f3e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1207.083716] env[63345]: DEBUG oslo_concurrency.lockutils [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Releasing lock "refresh_cache-53f759e4-0398-40ef-823a-3028d1ac82b1" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1207.084126] env[63345]: DEBUG nova.compute.manager [None 
req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Instance network_info: |[{"id": "4b3221f1-3c39-4726-b760-339e16d0d89e", "address": "fa:16:3e:74:52:98", "network": {"id": "dffa0b34-9323-42eb-aeb1-e32aebcb75c8", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1826417035-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "57e386920081487583ea143003aca8c4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "94e1d797-8eb2-4400-9f7d-f2eb60eb4cf2", "external-id": "nsx-vlan-transportzone-828", "segmentation_id": 828, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b3221f1-3c", "ovs_interfaceid": "4b3221f1-3c39-4726-b760-339e16d0d89e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63345) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 1207.084618] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:74:52:98', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '94e1d797-8eb2-4400-9f7d-f2eb60eb4cf2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4b3221f1-3c39-4726-b760-339e16d0d89e', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1207.092338] env[63345]: DEBUG oslo.service.loopingcall [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1207.092554] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1207.092781] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b28c6b07-6760-4e20-bbaa-a7c07f49722b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.113031] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1207.113031] env[63345]: value = "task-1018014" [ 1207.113031] env[63345]: _type = "Task" [ 1207.113031] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1207.120868] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1018014, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1207.511430] env[63345]: DEBUG oslo_vmware.api [None req-e61b4a6f-5014-4bb9-bcfd-6f64800b9876 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': task-1018013, 'name': ReconfigVM_Task, 'duration_secs': 0.138745} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1207.511803] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-e61b4a6f-5014-4bb9-bcfd-6f64800b9876 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-226187', 'volume_id': '2e1de061-987c-4b81-bc79-4a1a1952cb1a', 'name': 'volume-2e1de061-987c-4b81-bc79-4a1a1952cb1a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '6b6ce545-0eca-4ef2-a859-c1e8ef978150', 'attached_at': '', 'detached_at': '', 'volume_id': '2e1de061-987c-4b81-bc79-4a1a1952cb1a', 'serial': '2e1de061-987c-4b81-bc79-4a1a1952cb1a'} {{(pid=63345) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1207.532290] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Releasing lock "refresh_cache-83ef21e9-62eb-4f0d-9c0c-a038743e0dd8" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1207.532488] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] [instance: 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8] Updated the network info_cache for instance {{(pid=63345) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10328}} [ 1207.532679] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1207.532838] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1207.532991] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1207.533174] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1207.533330] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1207.533477] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task 
ComputeManager._reclaim_queued_deletes {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1207.533606] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63345) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10876}} [ 1207.533751] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager.update_available_resource {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1207.622498] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1018014, 'name': CreateVM_Task, 'duration_secs': 0.302602} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1207.622712] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1207.623350] env[63345]: DEBUG oslo_concurrency.lockutils [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1207.623524] env[63345]: DEBUG oslo_concurrency.lockutils [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1207.623856] env[63345]: DEBUG oslo_concurrency.lockutils [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1207.624117] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-97d2c70e-19d7-4844-8e33-5f25dfbf6095 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.628266] env[63345]: DEBUG oslo_vmware.api [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 1207.628266] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]525a070c-c664-1613-8b37-f9a1a8b75cd8" [ 1207.628266] env[63345]: _type = "Task" [ 1207.628266] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1207.635621] env[63345]: DEBUG oslo_vmware.api [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]525a070c-c664-1613-8b37-f9a1a8b75cd8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.037022] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1208.037291] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1208.037469] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1208.037629] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63345) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1208.038571] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d707aac-dcc0-40b7-beac-ac6276b69f0a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.046478] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43ca8fd5-e1f0-4a34-888d-3c02f88e855e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.051265] env[63345]: DEBUG nova.objects.instance [None req-e61b4a6f-5014-4bb9-bcfd-6f64800b9876 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Lazy-loading 'flavor' on Instance uuid 6b6ce545-0eca-4ef2-a859-c1e8ef978150 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1208.062241] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cbc59d4-bcbd-4a41-9ea6-b11a1cadac51 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.069012] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0896b49-a950-40c8-9ffd-70bd651acd89 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.097594] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 
free_ram=180470MB free_disk=187GB free_vcpus=48 pci_devices=None {{(pid=63345) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1208.097752] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1208.097942] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1208.137756] env[63345]: DEBUG oslo_vmware.api [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]525a070c-c664-1613-8b37-f9a1a8b75cd8, 'name': SearchDatastore_Task, 'duration_secs': 0.009037} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1208.138109] env[63345]: DEBUG oslo_concurrency.lockutils [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1208.138302] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Processing image 2ff49e1b-8f44-4332-bba9-777d55ff62c4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1208.138566] env[63345]: DEBUG oslo_concurrency.lockutils [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1208.138689] env[63345]: DEBUG oslo_concurrency.lockutils [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1208.138868] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1208.139134] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2e672554-12cc-49d7-bc79-c21dd788fe1e {{(pid=63345) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.146732] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1208.146909] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1208.147579] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3925a734-4b41-4e34-9ab6-c20b63da3699 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.152046] env[63345]: DEBUG oslo_vmware.api [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 1208.152046] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52d7d37e-9866-f6dd-363e-159f7f88fc11" [ 1208.152046] env[63345]: _type = "Task" [ 1208.152046] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1208.159460] env[63345]: DEBUG oslo_vmware.api [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52d7d37e-9866-f6dd-363e-159f7f88fc11, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.662327] env[63345]: DEBUG oslo_vmware.api [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52d7d37e-9866-f6dd-363e-159f7f88fc11, 'name': SearchDatastore_Task, 'duration_secs': 0.007291} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1208.662797] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-72bfa243-c360-49ef-83d0-e7eb60a1d5c2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.667758] env[63345]: DEBUG oslo_vmware.api [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 1208.667758] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]526ac979-5706-abe1-dcea-f8b708595564" [ 1208.667758] env[63345]: _type = "Task" [ 1208.667758] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1208.674841] env[63345]: DEBUG oslo_vmware.api [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]526ac979-5706-abe1-dcea-f8b708595564, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1209.058059] env[63345]: DEBUG oslo_concurrency.lockutils [None req-e61b4a6f-5014-4bb9-bcfd-6f64800b9876 tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Lock "6b6ce545-0eca-4ef2-a859-c1e8ef978150" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.214s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1209.122990] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 148c961e-d260-4dbd-ad9f-52f94b072096 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1209.123237] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1209.123379] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 937ba0d6-bf23-45ae-8d75-cd7559e436f5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1209.124107] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 6b6ce545-0eca-4ef2-a859-c1e8ef978150 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1209.124107] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 53f759e4-0398-40ef-823a-3028d1ac82b1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1209.124107] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=63345) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1209.124107] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1472MB phys_disk=200GB used_disk=5GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=63345) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1209.179413] env[63345]: DEBUG oslo_vmware.api [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]526ac979-5706-abe1-dcea-f8b708595564, 'name': SearchDatastore_Task, 'duration_secs': 0.008911} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1209.179694] env[63345]: DEBUG oslo_concurrency.lockutils [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1209.179953] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 53f759e4-0398-40ef-823a-3028d1ac82b1/53f759e4-0398-40ef-823a-3028d1ac82b1.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1209.180225] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-50ef0c9e-4bb1-4ae8-8fd6-d91fdf343cb0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.188443] env[63345]: DEBUG oslo_vmware.api [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 1209.188443] env[63345]: value = "task-1018015" [ 1209.188443] env[63345]: _type = "Task" [ 1209.188443] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1209.192952] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a16024f-08f7-4f68-b412-0603760378ec {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.197944] env[63345]: DEBUG oslo_vmware.api [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1018015, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1209.201989] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b4e394a-b443-4d71-a88e-3a02e284595d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.232532] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-420135db-7ae4-4943-9f8a-285a753d2562 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.239488] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b0daaeb-31c7-42c2-a721-bae0140cb19e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.253324] env[63345]: DEBUG nova.compute.provider_tree [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1209.700494] env[63345]: DEBUG oslo_vmware.api [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1018015, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.441374} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1209.700812] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2ff49e1b-8f44-4332-bba9-777d55ff62c4/2ff49e1b-8f44-4332-bba9-777d55ff62c4.vmdk to [datastore2] 53f759e4-0398-40ef-823a-3028d1ac82b1/53f759e4-0398-40ef-823a-3028d1ac82b1.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1209.701878] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Extending root virtual disk to 1048576 {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1209.701878] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fe8ae9e6-1639-4207-9c0b-ce2ceb29988f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.708262] env[63345]: DEBUG oslo_vmware.api [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 1209.708262] env[63345]: value = "task-1018016" [ 1209.708262] env[63345]: _type = "Task" [ 1209.708262] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1209.715582] env[63345]: DEBUG oslo_vmware.api [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1018016, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1209.756220] env[63345]: DEBUG nova.scheduler.client.report [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1210.217896] env[63345]: DEBUG oslo_vmware.api [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1018016, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066092} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1210.218176] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Extended root virtual disk {{(pid=63345) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1210.218869] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f9e4c8f-544e-4edd-89cd-1cb3167ef381 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.240054] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Reconfiguring VM instance instance-00000076 to attach disk [datastore2] 53f759e4-0398-40ef-823a-3028d1ac82b1/53f759e4-0398-40ef-823a-3028d1ac82b1.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1210.240435] env[63345]: DEBUG oslo_concurrency.lockutils [None req-72cf1307-c141-47bc-b1c2-6d537e808f7d tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Acquiring lock "6b6ce545-0eca-4ef2-a859-c1e8ef978150" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1210.240659] env[63345]: DEBUG oslo_concurrency.lockutils [None req-72cf1307-c141-47bc-b1c2-6d537e808f7d tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Lock "6b6ce545-0eca-4ef2-a859-c1e8ef978150" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1210.240860] env[63345]: DEBUG oslo_concurrency.lockutils [None req-72cf1307-c141-47bc-b1c2-6d537e808f7d tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Acquiring lock "6b6ce545-0eca-4ef2-a859-c1e8ef978150-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1210.241060] env[63345]: DEBUG oslo_concurrency.lockutils [None req-72cf1307-c141-47bc-b1c2-6d537e808f7d tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Lock "6b6ce545-0eca-4ef2-a859-c1e8ef978150-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1210.241236] env[63345]: DEBUG oslo_concurrency.lockutils [None req-72cf1307-c141-47bc-b1c2-6d537e808f7d tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Lock "6b6ce545-0eca-4ef2-a859-c1e8ef978150-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1210.242811] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-75ad85ee-14b0-4289-920b-eb1fc76e4e4a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.257090] env[63345]: INFO nova.compute.manager [None req-72cf1307-c141-47bc-b1c2-6d537e808f7d tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Terminating instance [ 1210.260857] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63345) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1210.260857] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.163s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1210.267488] env[63345]: DEBUG oslo_vmware.api [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 1210.267488] env[63345]: value = "task-1018017" [ 1210.267488] env[63345]: _type = "Task" [ 1210.267488] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1210.275490] env[63345]: DEBUG oslo_vmware.api [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1018017, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.761869] env[63345]: DEBUG nova.compute.manager [None req-72cf1307-c141-47bc-b1c2-6d537e808f7d tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Start destroying the instance on the hypervisor. 
{{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 1210.762291] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-72cf1307-c141-47bc-b1c2-6d537e808f7d tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1210.763064] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-790121e3-0b60-4f09-8041-d54f8043f788 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.772790] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-72cf1307-c141-47bc-b1c2-6d537e808f7d tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1210.773381] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6e40ac8c-411e-474d-b295-9364aca4c1e6 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.777982] env[63345]: DEBUG oslo_vmware.api [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1018017, 'name': ReconfigVM_Task, 'duration_secs': 0.26693} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1210.778343] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Reconfigured VM instance instance-00000076 to attach disk [datastore2] 53f759e4-0398-40ef-823a-3028d1ac82b1/53f759e4-0398-40ef-823a-3028d1ac82b1.vmdk or device None with type sparse {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1210.779730] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-eb68bf89-13cf-455c-a112-cda0a3e23eaf {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.781157] env[63345]: DEBUG oslo_vmware.api [None req-72cf1307-c141-47bc-b1c2-6d537e808f7d tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Waiting for the task: (returnval){ [ 1210.781157] env[63345]: value = "task-1018018" [ 1210.781157] env[63345]: _type = "Task" [ 1210.781157] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1210.786195] env[63345]: DEBUG oslo_vmware.api [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 1210.786195] env[63345]: value = "task-1018019" [ 1210.786195] env[63345]: _type = "Task" [ 1210.786195] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1210.791609] env[63345]: DEBUG oslo_vmware.api [None req-72cf1307-c141-47bc-b1c2-6d537e808f7d tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': task-1018018, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.796236] env[63345]: DEBUG oslo_vmware.api [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1018019, 'name': Rename_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1211.293985] env[63345]: DEBUG oslo_vmware.api [None req-72cf1307-c141-47bc-b1c2-6d537e808f7d tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': task-1018018, 'name': PowerOffVM_Task, 'duration_secs': 0.228337} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1211.294622] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-72cf1307-c141-47bc-b1c2-6d537e808f7d tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1211.294809] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-72cf1307-c141-47bc-b1c2-6d537e808f7d tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1211.295078] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-444fdde2-6159-4eb1-8ab1-9ae9c3fba01a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.299259] env[63345]: DEBUG oslo_vmware.api [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1018019, 'name': Rename_Task, 'duration_secs': 0.216038} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1211.299802] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1211.300026] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fb22fb79-d7ea-4730-be7b-b1a49703f86a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.305096] env[63345]: DEBUG oslo_vmware.api [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 1211.305096] env[63345]: value = "task-1018021" [ 1211.305096] env[63345]: _type = "Task" [ 1211.305096] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1211.312101] env[63345]: DEBUG oslo_vmware.api [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1018021, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1211.382277] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-72cf1307-c141-47bc-b1c2-6d537e808f7d tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1211.382583] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-72cf1307-c141-47bc-b1c2-6d537e808f7d tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1211.382829] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-72cf1307-c141-47bc-b1c2-6d537e808f7d tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Deleting the datastore file [datastore2] 6b6ce545-0eca-4ef2-a859-c1e8ef978150 {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1211.383115] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-046711b5-8e1f-4d4a-a237-03e1036f068b {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.389622] env[63345]: DEBUG oslo_vmware.api [None req-72cf1307-c141-47bc-b1c2-6d537e808f7d tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Waiting for the task: (returnval){ [ 1211.389622] env[63345]: value = "task-1018022" [ 1211.389622] env[63345]: _type = "Task" [ 1211.389622] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1211.397372] env[63345]: DEBUG oslo_vmware.api [None req-72cf1307-c141-47bc-b1c2-6d537e808f7d tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': task-1018022, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1211.814580] env[63345]: DEBUG oslo_vmware.api [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1018021, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1211.899684] env[63345]: DEBUG oslo_vmware.api [None req-72cf1307-c141-47bc-b1c2-6d537e808f7d tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Task: {'id': task-1018022, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.13732} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1211.899943] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-72cf1307-c141-47bc-b1c2-6d537e808f7d tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1211.900153] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-72cf1307-c141-47bc-b1c2-6d537e808f7d tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1211.900347] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-72cf1307-c141-47bc-b1c2-6d537e808f7d tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1211.900525] env[63345]: INFO nova.compute.manager [None req-72cf1307-c141-47bc-b1c2-6d537e808f7d tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1211.900773] env[63345]: DEBUG oslo.service.loopingcall [None req-72cf1307-c141-47bc-b1c2-6d537e808f7d tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1211.900972] env[63345]: DEBUG nova.compute.manager [-] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 1211.901081] env[63345]: DEBUG nova.network.neutron [-] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1212.317606] env[63345]: DEBUG oslo_vmware.api [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1018021, 'name': PowerOnVM_Task, 'duration_secs': 0.689489} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1212.318160] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1212.318160] env[63345]: INFO nova.compute.manager [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Took 8.27 seconds to spawn the instance on the hypervisor. [ 1212.318319] env[63345]: DEBUG nova.compute.manager [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1212.319773] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90416aec-aeed-4ddd-8699-056f5a06a96f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.331927] env[63345]: DEBUG nova.compute.manager [req-97633429-daf3-4ea4-96e7-d999207611c6 req-86a2b8a2-38bd-4ead-abd5-254a096265dc service nova] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Received event network-vif-deleted-c20cba8e-091b-4afd-9e5e-4d87441d4aea {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1212.331982] env[63345]: INFO nova.compute.manager [req-97633429-daf3-4ea4-96e7-d999207611c6 req-86a2b8a2-38bd-4ead-abd5-254a096265dc service nova] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Neutron deleted interface c20cba8e-091b-4afd-9e5e-4d87441d4aea; detaching it from the instance and deleting it from the info cache [ 1212.332159] env[63345]: DEBUG nova.network.neutron [req-97633429-daf3-4ea4-96e7-d999207611c6 req-86a2b8a2-38bd-4ead-abd5-254a096265dc service nova] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1212.809547] env[63345]: DEBUG nova.network.neutron [-] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1212.836496] env[63345]: DEBUG 
oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bdbdf083-11df-4ddb-a4b7-985370cfc1df {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.846909] env[63345]: INFO nova.compute.manager [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Took 12.98 seconds to build instance. [ 1212.850866] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c669f89c-8f41-4337-9853-0910df3a0d6f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.883960] env[63345]: DEBUG nova.compute.manager [req-97633429-daf3-4ea4-96e7-d999207611c6 req-86a2b8a2-38bd-4ead-abd5-254a096265dc service nova] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Detach interface failed, port_id=c20cba8e-091b-4afd-9e5e-4d87441d4aea, reason: Instance 6b6ce545-0eca-4ef2-a859-c1e8ef978150 could not be found. {{(pid=63345) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 1213.077575] env[63345]: DEBUG nova.compute.manager [req-53f59bbb-80f7-4ff2-801b-49590c2cb078 req-40f476f2-5411-4067-ac4f-a7b7fde7b750 service nova] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Received event network-changed-4b3221f1-3c39-4726-b760-339e16d0d89e {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1213.077783] env[63345]: DEBUG nova.compute.manager [req-53f59bbb-80f7-4ff2-801b-49590c2cb078 req-40f476f2-5411-4067-ac4f-a7b7fde7b750 service nova] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Refreshing instance network info cache due to event network-changed-4b3221f1-3c39-4726-b760-339e16d0d89e. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 1213.078013] env[63345]: DEBUG oslo_concurrency.lockutils [req-53f59bbb-80f7-4ff2-801b-49590c2cb078 req-40f476f2-5411-4067-ac4f-a7b7fde7b750 service nova] Acquiring lock "refresh_cache-53f759e4-0398-40ef-823a-3028d1ac82b1" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1213.078179] env[63345]: DEBUG oslo_concurrency.lockutils [req-53f59bbb-80f7-4ff2-801b-49590c2cb078 req-40f476f2-5411-4067-ac4f-a7b7fde7b750 service nova] Acquired lock "refresh_cache-53f759e4-0398-40ef-823a-3028d1ac82b1" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1213.078382] env[63345]: DEBUG nova.network.neutron [req-53f59bbb-80f7-4ff2-801b-49590c2cb078 req-40f476f2-5411-4067-ac4f-a7b7fde7b750 service nova] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Refreshing network info cache for port 4b3221f1-3c39-4726-b760-339e16d0d89e {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1213.313808] env[63345]: INFO nova.compute.manager [-] [instance: 6b6ce545-0eca-4ef2-a859-c1e8ef978150] Took 1.41 seconds to deallocate network for instance. 
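Every vCenter operation in this trace (CreateVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task, PowerOffVM_Task, DeleteDatastoreFile_Task) follows the same oslo.vmware pattern: the driver starts the task through the API session, then blocks in wait_for_task (api.py:397), which polls the task's progress (the "progress is N%" lines at api.py:434) until it reports completion (api.py:444). A minimal caller-side sketch of that pattern, assuming an already-established oslo_vmware.api.VMwareAPISession named `session` (such as the one created at the start of this log); the helper name, the default path, and the omitted searchSpec argument are illustrative and not taken from the Nova source:

    # Illustrative sketch of the invoke-then-wait pattern seen throughout this log.
    # Assumes `session` is an existing oslo_vmware.api.VMwareAPISession and
    # `ds_browser` is the HostDatastoreBrowser managed object of the datastore.
    def search_image_cache(session, ds_browser,
                           ds_path="[datastore2] devstack-image-cache_base"):
        # Start the vCenter task (the "Invoking HostDatastoreBrowser.SearchDatastore_Task"
        # lines above); invoke_api proxies the SOAP call through the session's vim client.
        task = session.invoke_api(session.vim, "SearchDatastore_Task",
                                  ds_browser, datastorePath=ds_path)
        # Block until the task finishes; oslo.vmware polls it at task_poll_interval,
        # producing the "progress is 0%" / "completed successfully" lines above.
        task_info = session.wait_for_task(task)
        return task_info.result

The same wait loop backs the spawn sequence for instance 53f759e4 and the destroy sequence for instance 6b6ce545 above; only the invoked method and its arguments change.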
[ 1213.348666] env[63345]: DEBUG oslo_concurrency.lockutils [None req-865dd329-91b6-4925-8d08-41a6d289bb9f tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lock "53f759e4-0398-40ef-823a-3028d1ac82b1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.485s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1213.782722] env[63345]: DEBUG nova.network.neutron [req-53f59bbb-80f7-4ff2-801b-49590c2cb078 req-40f476f2-5411-4067-ac4f-a7b7fde7b750 service nova] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Updated VIF entry in instance network info cache for port 4b3221f1-3c39-4726-b760-339e16d0d89e. {{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1213.783140] env[63345]: DEBUG nova.network.neutron [req-53f59bbb-80f7-4ff2-801b-49590c2cb078 req-40f476f2-5411-4067-ac4f-a7b7fde7b750 service nova] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Updating instance_info_cache with network_info: [{"id": "4b3221f1-3c39-4726-b760-339e16d0d89e", "address": "fa:16:3e:74:52:98", "network": {"id": "dffa0b34-9323-42eb-aeb1-e32aebcb75c8", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1826417035-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.227", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "57e386920081487583ea143003aca8c4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "94e1d797-8eb2-4400-9f7d-f2eb60eb4cf2", "external-id": "nsx-vlan-transportzone-828", "segmentation_id": 828, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b3221f1-3c", "ovs_interfaceid": "4b3221f1-3c39-4726-b760-339e16d0d89e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1213.819097] env[63345]: DEBUG oslo_concurrency.lockutils [None req-72cf1307-c141-47bc-b1c2-6d537e808f7d tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1213.819476] env[63345]: DEBUG oslo_concurrency.lockutils [None req-72cf1307-c141-47bc-b1c2-6d537e808f7d tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1213.819799] env[63345]: DEBUG nova.objects.instance [None req-72cf1307-c141-47bc-b1c2-6d537e808f7d tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Lazy-loading 'resources' on Instance uuid 
6b6ce545-0eca-4ef2-a859-c1e8ef978150 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1214.286540] env[63345]: DEBUG oslo_concurrency.lockutils [req-53f59bbb-80f7-4ff2-801b-49590c2cb078 req-40f476f2-5411-4067-ac4f-a7b7fde7b750 service nova] Releasing lock "refresh_cache-53f759e4-0398-40ef-823a-3028d1ac82b1" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1214.396610] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc2da325-4272-42a4-8bbb-b4c519c51771 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.405253] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-967368a4-69c7-46a0-abbe-739c59053c75 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.436124] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-030dc77e-bde3-458c-8847-4f1bb7295618 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.444048] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d57f00e-d0db-4eb1-b363-a8f0fb6bc51d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.457747] env[63345]: DEBUG nova.compute.provider_tree [None req-72cf1307-c141-47bc-b1c2-6d537e808f7d tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1214.960879] env[63345]: DEBUG nova.scheduler.client.report [None req-72cf1307-c141-47bc-b1c2-6d537e808f7d tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1215.467354] env[63345]: DEBUG oslo_concurrency.lockutils [None req-72cf1307-c141-47bc-b1c2-6d537e808f7d tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.648s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1215.487007] env[63345]: INFO nova.scheduler.client.report [None req-72cf1307-c141-47bc-b1c2-6d537e808f7d tempest-AttachVolumeTestJSON-569378185 tempest-AttachVolumeTestJSON-569378185-project-member] Deleted allocations for instance 6b6ce545-0eca-4ef2-a859-c1e8ef978150 [ 1215.994957] env[63345]: DEBUG oslo_concurrency.lockutils [None req-72cf1307-c141-47bc-b1c2-6d537e808f7d tempest-AttachVolumeTestJSON-569378185 
tempest-AttachVolumeTestJSON-569378185-project-member] Lock "6b6ce545-0eca-4ef2-a859-c1e8ef978150" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.754s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1233.051697] env[63345]: DEBUG oslo_concurrency.lockutils [None req-67bb0c90-36c3-4bc8-8a35-6bd8d976f2b2 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Acquiring lock "937ba0d6-bf23-45ae-8d75-cd7559e436f5" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1233.052148] env[63345]: DEBUG oslo_concurrency.lockutils [None req-67bb0c90-36c3-4bc8-8a35-6bd8d976f2b2 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Lock "937ba0d6-bf23-45ae-8d75-cd7559e436f5" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1233.555448] env[63345]: INFO nova.compute.manager [None req-67bb0c90-36c3-4bc8-8a35-6bd8d976f2b2 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Detaching volume 6c50c9be-1b18-4a50-af11-ccaeacb957ca [ 1233.586282] env[63345]: INFO nova.virt.block_device [None req-67bb0c90-36c3-4bc8-8a35-6bd8d976f2b2 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Attempting to driver detach volume 6c50c9be-1b18-4a50-af11-ccaeacb957ca from mountpoint /dev/sdb [ 1233.586556] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-67bb0c90-36c3-4bc8-8a35-6bd8d976f2b2 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Volume detach. 
Driver type: vmdk {{(pid=63345) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1233.586751] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-67bb0c90-36c3-4bc8-8a35-6bd8d976f2b2 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-226185', 'volume_id': '6c50c9be-1b18-4a50-af11-ccaeacb957ca', 'name': 'volume-6c50c9be-1b18-4a50-af11-ccaeacb957ca', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '937ba0d6-bf23-45ae-8d75-cd7559e436f5', 'attached_at': '', 'detached_at': '', 'volume_id': '6c50c9be-1b18-4a50-af11-ccaeacb957ca', 'serial': '6c50c9be-1b18-4a50-af11-ccaeacb957ca'} {{(pid=63345) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1233.587680] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae823e16-34c4-4736-b343-8850ce6b2a3e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.609451] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec771b5b-7704-40e5-9d19-9899d6e50f02 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.616634] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73fc7e88-7f33-4147-ae7e-52cf95c685b6 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.637351] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17103919-0bb8-4f79-b21f-252dd2f0ce7f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.651361] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-67bb0c90-36c3-4bc8-8a35-6bd8d976f2b2 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] The volume has not been displaced from its original location: [datastore2] volume-6c50c9be-1b18-4a50-af11-ccaeacb957ca/volume-6c50c9be-1b18-4a50-af11-ccaeacb957ca.vmdk. No consolidation needed. 
{{(pid=63345) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1233.656543] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-67bb0c90-36c3-4bc8-8a35-6bd8d976f2b2 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Reconfiguring VM instance instance-00000073 to detach disk 2001 {{(pid=63345) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1233.656832] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a66a6fd5-ec00-471f-ab3b-881b79c3aad0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.673964] env[63345]: DEBUG oslo_vmware.api [None req-67bb0c90-36c3-4bc8-8a35-6bd8d976f2b2 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Waiting for the task: (returnval){ [ 1233.673964] env[63345]: value = "task-1018026" [ 1233.673964] env[63345]: _type = "Task" [ 1233.673964] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1233.681341] env[63345]: DEBUG oslo_vmware.api [None req-67bb0c90-36c3-4bc8-8a35-6bd8d976f2b2 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1018026, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1234.183461] env[63345]: DEBUG oslo_vmware.api [None req-67bb0c90-36c3-4bc8-8a35-6bd8d976f2b2 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1018026, 'name': ReconfigVM_Task, 'duration_secs': 0.217535} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1234.183841] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-67bb0c90-36c3-4bc8-8a35-6bd8d976f2b2 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Reconfigured VM instance instance-00000073 to detach disk 2001 {{(pid=63345) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1234.188316] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-adfec423-de26-490a-b01e-ed97c40cdcfe {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.202626] env[63345]: DEBUG oslo_vmware.api [None req-67bb0c90-36c3-4bc8-8a35-6bd8d976f2b2 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Waiting for the task: (returnval){ [ 1234.202626] env[63345]: value = "task-1018027" [ 1234.202626] env[63345]: _type = "Task" [ 1234.202626] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1234.209814] env[63345]: DEBUG oslo_vmware.api [None req-67bb0c90-36c3-4bc8-8a35-6bd8d976f2b2 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1018027, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1234.711921] env[63345]: DEBUG oslo_vmware.api [None req-67bb0c90-36c3-4bc8-8a35-6bd8d976f2b2 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1018027, 'name': ReconfigVM_Task, 'duration_secs': 0.130818} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1234.712240] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-67bb0c90-36c3-4bc8-8a35-6bd8d976f2b2 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-226185', 'volume_id': '6c50c9be-1b18-4a50-af11-ccaeacb957ca', 'name': 'volume-6c50c9be-1b18-4a50-af11-ccaeacb957ca', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '937ba0d6-bf23-45ae-8d75-cd7559e436f5', 'attached_at': '', 'detached_at': '', 'volume_id': '6c50c9be-1b18-4a50-af11-ccaeacb957ca', 'serial': '6c50c9be-1b18-4a50-af11-ccaeacb957ca'} {{(pid=63345) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1235.251341] env[63345]: DEBUG nova.objects.instance [None req-67bb0c90-36c3-4bc8-8a35-6bd8d976f2b2 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Lazy-loading 'flavor' on Instance uuid 937ba0d6-bf23-45ae-8d75-cd7559e436f5 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1236.259922] env[63345]: DEBUG oslo_concurrency.lockutils [None req-67bb0c90-36c3-4bc8-8a35-6bd8d976f2b2 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Lock "937ba0d6-bf23-45ae-8d75-cd7559e436f5" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.208s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1237.360476] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6203ae64-e54d-4333-9395-b638ab418a6d tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Acquiring lock "937ba0d6-bf23-45ae-8d75-cd7559e436f5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1237.360878] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6203ae64-e54d-4333-9395-b638ab418a6d tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Lock "937ba0d6-bf23-45ae-8d75-cd7559e436f5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1237.360954] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6203ae64-e54d-4333-9395-b638ab418a6d tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Acquiring lock "937ba0d6-bf23-45ae-8d75-cd7559e436f5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1237.361153] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6203ae64-e54d-4333-9395-b638ab418a6d tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Lock "937ba0d6-bf23-45ae-8d75-cd7559e436f5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1237.361331] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6203ae64-e54d-4333-9395-b638ab418a6d tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Lock "937ba0d6-bf23-45ae-8d75-cd7559e436f5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1237.363467] env[63345]: INFO nova.compute.manager [None req-6203ae64-e54d-4333-9395-b638ab418a6d tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Terminating instance [ 1237.867123] env[63345]: DEBUG nova.compute.manager [None req-6203ae64-e54d-4333-9395-b638ab418a6d tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Start destroying the instance on the hypervisor. {{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 1237.867427] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-6203ae64-e54d-4333-9395-b638ab418a6d tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1237.868324] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20d5f2f6-a2f2-4e48-9a9f-602ce1ec3a8a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.876524] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-6203ae64-e54d-4333-9395-b638ab418a6d tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1237.876788] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-eace95a8-bbfc-49a2-9090-ac1fcfeefb7c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.882768] env[63345]: DEBUG oslo_vmware.api [None req-6203ae64-e54d-4333-9395-b638ab418a6d tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Waiting for the task: (returnval){ [ 1237.882768] env[63345]: value = "task-1018028" [ 1237.882768] env[63345]: _type = "Task" [ 1237.882768] env[63345]: } to complete. 
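The power-off just logged (PowerOffVM_Task, task-1018028) follows the standard oslo.vmware invoke-and-wait pattern. A minimal sketch of that pattern, assuming `session` is an already-established oslo_vmware.api.VMwareAPISession and `vm_ref` a managed-object reference previously looked up for the instance (neither appears literally in the log):

```python
# Minimal sketch (not Nova's code) of the invoke-and-wait pattern visible in the
# log above: issue PowerOffVM_Task through an oslo.vmware session and block on it.
from oslo_vmware import exceptions as vexc


def power_off_vm(session, vm_ref):
    """Power off a VM and wait for the vCenter task to finish."""
    try:
        # invoke_api proxies the SOAP call and returns a Task moref
        # (the log shows these as task-10180xx values).
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        # wait_for_task polls the task (the "progress is 0%" lines above)
        # until it reaches 'success' or raises on 'error'.
        session.wait_for_task(task)
    except vexc.VimFaultException:
        # e.g. InvalidPowerState when the VM is already off; callers decide
        # whether that is fatal.
        raise
```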
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1237.892248] env[63345]: DEBUG oslo_vmware.api [None req-6203ae64-e54d-4333-9395-b638ab418a6d tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1018028, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.393155] env[63345]: DEBUG oslo_vmware.api [None req-6203ae64-e54d-4333-9395-b638ab418a6d tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1018028, 'name': PowerOffVM_Task, 'duration_secs': 0.176345} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1238.393537] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-6203ae64-e54d-4333-9395-b638ab418a6d tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1238.393609] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-6203ae64-e54d-4333-9395-b638ab418a6d tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1238.393819] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fecd766b-8d4a-4022-ade0-0b5808180c1d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.459503] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-6203ae64-e54d-4333-9395-b638ab418a6d tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1238.459765] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-6203ae64-e54d-4333-9395-b638ab418a6d tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1238.459973] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-6203ae64-e54d-4333-9395-b638ab418a6d tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Deleting the datastore file [datastore2] 937ba0d6-bf23-45ae-8d75-cd7559e436f5 {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1238.460265] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-598fd293-fdd9-4f76-9f57-c9ea428e865a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.466993] env[63345]: DEBUG oslo_vmware.api [None req-6203ae64-e54d-4333-9395-b638ab418a6d tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Waiting for the task: (returnval){ [ 
1238.466993] env[63345]: value = "task-1018030" [ 1238.466993] env[63345]: _type = "Task" [ 1238.466993] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1238.474076] env[63345]: DEBUG oslo_vmware.api [None req-6203ae64-e54d-4333-9395-b638ab418a6d tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1018030, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.976237] env[63345]: DEBUG oslo_vmware.api [None req-6203ae64-e54d-4333-9395-b638ab418a6d tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1018030, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.131676} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1238.976505] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-6203ae64-e54d-4333-9395-b638ab418a6d tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1238.976714] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-6203ae64-e54d-4333-9395-b638ab418a6d tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1238.976898] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-6203ae64-e54d-4333-9395-b638ab418a6d tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1238.977092] env[63345]: INFO nova.compute.manager [None req-6203ae64-e54d-4333-9395-b638ab418a6d tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1238.977344] env[63345]: DEBUG oslo.service.loopingcall [None req-6203ae64-e54d-4333-9395-b638ab418a6d tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
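The "Waiting for function ... _deallocate_network_with_retries to return" line comes from oslo.service's looping-call machinery, which Nova uses to retry network deallocation. The sketch below shows the plain FixedIntervalLoopingCall form of that mechanism, not Nova's exact wrapper; `deallocate` is a hypothetical callable standing in for the real cleanup:

```python
# Generic sketch of the oslo.service looping-call mechanism referenced above.
from oslo_service import loopingcall


def deallocate_with_retries(deallocate, max_attempts=3):
    attempts = {'n': 0}

    def _try_once():
        attempts['n'] += 1
        try:
            deallocate()
        except Exception:
            if attempts['n'] >= max_attempts:
                raise          # stops the loop and propagates the error
            return             # run again on the next interval
        # Success: stopping the loop is signalled by raising LoopingCallDone.
        raise loopingcall.LoopingCallDone()

    timer = loopingcall.FixedIntervalLoopingCall(_try_once)
    # start() returns an event; wait() blocks until LoopingCallDone or an error.
    timer.start(interval=1.0).wait()
```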
{{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1238.977540] env[63345]: DEBUG nova.compute.manager [-] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 1238.977637] env[63345]: DEBUG nova.network.neutron [-] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1239.455302] env[63345]: DEBUG nova.compute.manager [req-d271aff8-d259-429b-b1cf-63e9aa6f0565 req-6e21b8ba-9772-48ba-9cbd-db443de891d3 service nova] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Received event network-vif-deleted-4244898c-6ed5-4ae5-9bdb-12a31a9d8a9b {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1239.455669] env[63345]: INFO nova.compute.manager [req-d271aff8-d259-429b-b1cf-63e9aa6f0565 req-6e21b8ba-9772-48ba-9cbd-db443de891d3 service nova] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Neutron deleted interface 4244898c-6ed5-4ae5-9bdb-12a31a9d8a9b; detaching it from the instance and deleting it from the info cache [ 1239.455830] env[63345]: DEBUG nova.network.neutron [req-d271aff8-d259-429b-b1cf-63e9aa6f0565 req-6e21b8ba-9772-48ba-9cbd-db443de891d3 service nova] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1239.934812] env[63345]: DEBUG nova.network.neutron [-] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1239.958730] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-496184d0-4eb2-4572-b114-9534c5eb3c1a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.968733] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fb0e490-7c1b-4bf3-87de-b304171516cc {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.994032] env[63345]: DEBUG nova.compute.manager [req-d271aff8-d259-429b-b1cf-63e9aa6f0565 req-6e21b8ba-9772-48ba-9cbd-db443de891d3 service nova] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Detach interface failed, port_id=4244898c-6ed5-4ae5-9bdb-12a31a9d8a9b, reason: Instance 937ba0d6-bf23-45ae-8d75-cd7559e436f5 could not be found. {{(pid=63345) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 1240.437473] env[63345]: INFO nova.compute.manager [-] [instance: 937ba0d6-bf23-45ae-8d75-cd7559e436f5] Took 1.46 seconds to deallocate network for instance. 
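At this point the first instance (937ba0d6-...) has been fully torn down: power off, UnregisterVM, DeleteDatastoreFile_Task, then network deallocation. A condensed sketch of that order of operations, with `lookup_vm_ref`, `delete_datastore_dir` and `deallocate_network` as hypothetical stand-ins rather than Nova functions:

```python
# Condensed sketch of the destroy sequence the log just walked through.
# `session` is an oslo_vmware.api.VMwareAPISession as in the earlier sketch.
def destroy_instance(session, instance_uuid, datastore, lookup_vm_ref,
                     delete_datastore_dir, deallocate_network):
    vm_ref = lookup_vm_ref(session, instance_uuid)

    # 1. Power off (PowerOffVM_Task); skipped if the VM is already off.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)

    # 2. Remove the VM from the vCenter inventory (UnregisterVM is not a task,
    #    which is why the log shows no task wait for it).
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

    # 3. Delete the instance directory from the datastore
    #    (FileManager.DeleteDatastoreFile_Task in the log).
    delete_datastore_dir(session, '[%s] %s' % (datastore, instance_uuid))

    # 4. Finally release Neutron ports and the instance info cache.
    deallocate_network(instance_uuid)
```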
[ 1240.945480] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6203ae64-e54d-4333-9395-b638ab418a6d tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1240.945762] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6203ae64-e54d-4333-9395-b638ab418a6d tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1240.946011] env[63345]: DEBUG nova.objects.instance [None req-6203ae64-e54d-4333-9395-b638ab418a6d tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Lazy-loading 'resources' on Instance uuid 937ba0d6-bf23-45ae-8d75-cd7559e436f5 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1241.506076] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd1018ed-f7c7-447c-84f8-35d6d21c4e16 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.513526] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-585cba35-5bcf-4697-b81c-933df4946c25 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.543425] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29cc5627-ce74-40bf-841a-ebffbdd0bb79 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.550130] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9a19253-b164-46c2-a1b3-914da48d11dc {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.562652] env[63345]: DEBUG nova.compute.provider_tree [None req-6203ae64-e54d-4333-9395-b638ab418a6d tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1242.065992] env[63345]: DEBUG nova.scheduler.client.report [None req-6203ae64-e54d-4333-9395-b638ab418a6d tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1242.571373] env[63345]: DEBUG oslo_concurrency.lockutils [None 
req-6203ae64-e54d-4333-9395-b638ab418a6d tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.625s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1242.595953] env[63345]: INFO nova.scheduler.client.report [None req-6203ae64-e54d-4333-9395-b638ab418a6d tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Deleted allocations for instance 937ba0d6-bf23-45ae-8d75-cd7559e436f5 [ 1243.104916] env[63345]: DEBUG oslo_concurrency.lockutils [None req-6203ae64-e54d-4333-9395-b638ab418a6d tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Lock "937ba0d6-bf23-45ae-8d75-cd7559e436f5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.744s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1248.521353] env[63345]: DEBUG oslo_concurrency.lockutils [None req-43fb7a55-172f-4e48-9811-16746b555530 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Acquiring lock "83ef21e9-62eb-4f0d-9c0c-a038743e0dd8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1248.521756] env[63345]: DEBUG oslo_concurrency.lockutils [None req-43fb7a55-172f-4e48-9811-16746b555530 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Lock "83ef21e9-62eb-4f0d-9c0c-a038743e0dd8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1248.521864] env[63345]: DEBUG oslo_concurrency.lockutils [None req-43fb7a55-172f-4e48-9811-16746b555530 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Acquiring lock "83ef21e9-62eb-4f0d-9c0c-a038743e0dd8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1248.522054] env[63345]: DEBUG oslo_concurrency.lockutils [None req-43fb7a55-172f-4e48-9811-16746b555530 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Lock "83ef21e9-62eb-4f0d-9c0c-a038743e0dd8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1248.522238] env[63345]: DEBUG oslo_concurrency.lockutils [None req-43fb7a55-172f-4e48-9811-16746b555530 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Lock "83ef21e9-62eb-4f0d-9c0c-a038743e0dd8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1248.526190] env[63345]: INFO nova.compute.manager 
[None req-43fb7a55-172f-4e48-9811-16746b555530 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8] Terminating instance [ 1249.030021] env[63345]: DEBUG nova.compute.manager [None req-43fb7a55-172f-4e48-9811-16746b555530 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8] Start destroying the instance on the hypervisor. {{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 1249.030291] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-43fb7a55-172f-4e48-9811-16746b555530 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1249.031261] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cc49fd0-7aed-4556-b2e7-6fcf210dcc4e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.039021] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-43fb7a55-172f-4e48-9811-16746b555530 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1249.039262] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9c907b32-1044-4ac3-9a55-a4b3cadca883 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.045382] env[63345]: DEBUG oslo_vmware.api [None req-43fb7a55-172f-4e48-9811-16746b555530 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Waiting for the task: (returnval){ [ 1249.045382] env[63345]: value = "task-1018032" [ 1249.045382] env[63345]: _type = "Task" [ 1249.045382] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1249.053133] env[63345]: DEBUG oslo_vmware.api [None req-43fb7a55-172f-4e48-9811-16746b555530 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1018032, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.554818] env[63345]: DEBUG oslo_vmware.api [None req-43fb7a55-172f-4e48-9811-16746b555530 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1018032, 'name': PowerOffVM_Task, 'duration_secs': 0.231121} completed successfully. 
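The "Acquiring lock ... / acquired ... / released ... held N.NNNs" triplets that bracket the terminate and resource-tracker work above are emitted by oslo.concurrency's lock helpers. A minimal sketch of the same serialization pattern (per-instance lock name, in-process lock), as an illustration rather than Nova's code:

```python
from oslo_concurrency import lockutils


def do_terminate_instance(instance_uuid, terminate):
    # lockutils.lock() is a context manager; external=False keeps it an
    # in-process lock, comparable to the per-instance locks in the log.
    with lockutils.lock(instance_uuid, external=False):
        terminate(instance_uuid)


# Equivalent decorator form, useful when the lock name is static,
# e.g. the resource tracker's "compute_resources" critical section:
@lockutils.synchronized('compute_resources')
def update_usage():
    pass  # resource-tracker style critical section
```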
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1249.555107] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-43fb7a55-172f-4e48-9811-16746b555530 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1249.555288] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-43fb7a55-172f-4e48-9811-16746b555530 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1249.555566] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-21475607-0059-464b-b0ce-be87af25ce31 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.617115] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-43fb7a55-172f-4e48-9811-16746b555530 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1249.617374] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-43fb7a55-172f-4e48-9811-16746b555530 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1249.617579] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-43fb7a55-172f-4e48-9811-16746b555530 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Deleting the datastore file [datastore2] 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8 {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1249.617866] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7feeed8d-0062-45f6-bedc-f21f64846f88 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.624229] env[63345]: DEBUG oslo_vmware.api [None req-43fb7a55-172f-4e48-9811-16746b555530 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Waiting for the task: (returnval){ [ 1249.624229] env[63345]: value = "task-1018034" [ 1249.624229] env[63345]: _type = "Task" [ 1249.624229] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1249.631810] env[63345]: DEBUG oslo_vmware.api [None req-43fb7a55-172f-4e48-9811-16746b555530 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1018034, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.134536] env[63345]: DEBUG oslo_vmware.api [None req-43fb7a55-172f-4e48-9811-16746b555530 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1018034, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.167664} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1250.134808] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-43fb7a55-172f-4e48-9811-16746b555530 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1250.134989] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-43fb7a55-172f-4e48-9811-16746b555530 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1250.135224] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-43fb7a55-172f-4e48-9811-16746b555530 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1250.135363] env[63345]: INFO nova.compute.manager [None req-43fb7a55-172f-4e48-9811-16746b555530 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1250.135656] env[63345]: DEBUG oslo.service.loopingcall [None req-43fb7a55-172f-4e48-9811-16746b555530 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1250.135866] env[63345]: DEBUG nova.compute.manager [-] [instance: 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 1250.135955] env[63345]: DEBUG nova.network.neutron [-] [instance: 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1250.137682] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fe85e90a-cdb0-46ca-8d6a-8fc6711e283d tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Acquiring lock "53f759e4-0398-40ef-823a-3028d1ac82b1" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1250.137936] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fe85e90a-cdb0-46ca-8d6a-8fc6711e283d tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lock "53f759e4-0398-40ef-823a-3028d1ac82b1" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1250.138163] env[63345]: INFO nova.compute.manager [None req-fe85e90a-cdb0-46ca-8d6a-8fc6711e283d tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Shelving [ 1250.388284] env[63345]: DEBUG nova.compute.manager [req-f8aefaaf-d522-44a7-b5f3-10e289f4fb8b req-ab1d7f01-317c-442b-bb67-c01fa0afb2a4 service nova] [instance: 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8] Received event network-vif-deleted-24c75109-1060-4770-8c15-3bce8002f3e0 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1250.388284] env[63345]: INFO nova.compute.manager [req-f8aefaaf-d522-44a7-b5f3-10e289f4fb8b req-ab1d7f01-317c-442b-bb67-c01fa0afb2a4 service nova] [instance: 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8] Neutron deleted interface 24c75109-1060-4770-8c15-3bce8002f3e0; detaching it from the instance and deleting it from the info cache [ 1250.388477] env[63345]: DEBUG nova.network.neutron [req-f8aefaaf-d522-44a7-b5f3-10e289f4fb8b req-ab1d7f01-317c-442b-bb67-c01fa0afb2a4 service nova] [instance: 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1250.867417] env[63345]: DEBUG nova.network.neutron [-] [instance: 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1250.894042] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-974c686b-92ff-4c9d-90d1-514addcd2352 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.902692] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-294fe5b6-ceb2-4b5c-ab4a-951e399b0335 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.926237] 
env[63345]: DEBUG nova.compute.manager [req-f8aefaaf-d522-44a7-b5f3-10e289f4fb8b req-ab1d7f01-317c-442b-bb67-c01fa0afb2a4 service nova] [instance: 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8] Detach interface failed, port_id=24c75109-1060-4770-8c15-3bce8002f3e0, reason: Instance 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8 could not be found. {{(pid=63345) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 1251.147521] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe85e90a-cdb0-46ca-8d6a-8fc6711e283d tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1251.147828] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b87e057a-c981-4d17-ad0a-257ffc41111a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.154650] env[63345]: DEBUG oslo_vmware.api [None req-fe85e90a-cdb0-46ca-8d6a-8fc6711e283d tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 1251.154650] env[63345]: value = "task-1018035" [ 1251.154650] env[63345]: _type = "Task" [ 1251.154650] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1251.162390] env[63345]: DEBUG oslo_vmware.api [None req-fe85e90a-cdb0-46ca-8d6a-8fc6711e283d tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1018035, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1251.370493] env[63345]: INFO nova.compute.manager [-] [instance: 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8] Took 1.23 seconds to deallocate network for instance. [ 1251.664743] env[63345]: DEBUG oslo_vmware.api [None req-fe85e90a-cdb0-46ca-8d6a-8fc6711e283d tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1018035, 'name': PowerOffVM_Task, 'duration_secs': 0.187298} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1251.665018] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe85e90a-cdb0-46ca-8d6a-8fc6711e283d tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1251.665857] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c7c7160-0533-40ac-a45b-3b40b5105367 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.683547] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa4ef7d9-38d0-4e9c-8839-34f833748f70 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.876841] env[63345]: DEBUG oslo_concurrency.lockutils [None req-43fb7a55-172f-4e48-9811-16746b555530 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1251.877226] env[63345]: DEBUG oslo_concurrency.lockutils [None req-43fb7a55-172f-4e48-9811-16746b555530 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1251.877366] env[63345]: DEBUG nova.objects.instance [None req-43fb7a55-172f-4e48-9811-16746b555530 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Lazy-loading 'resources' on Instance uuid 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1252.192953] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-fe85e90a-cdb0-46ca-8d6a-8fc6711e283d tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Creating Snapshot of the VM instance {{(pid=63345) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1252.193237] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-09851385-8c6d-45a3-b0ee-bf005a4195d9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.201982] env[63345]: DEBUG oslo_vmware.api [None req-fe85e90a-cdb0-46ca-8d6a-8fc6711e283d tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 1252.201982] env[63345]: value = "task-1018036" [ 1252.201982] env[63345]: _type = "Task" [ 1252.201982] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1252.209969] env[63345]: DEBUG oslo_vmware.api [None req-fe85e90a-cdb0-46ca-8d6a-8fc6711e283d tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1018036, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1252.433014] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d3a62aa-66fd-4893-a647-44e3df63fab9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.440626] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a58879bd-743b-4b06-9527-68343789d6cc {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.473842] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3eebbf8-ef9e-4b16-8376-9ee3c67796ce {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.481321] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2948818f-a0d0-4e33-b607-5c5399e392a0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.494569] env[63345]: DEBUG nova.compute.provider_tree [None req-43fb7a55-172f-4e48-9811-16746b555530 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1252.715354] env[63345]: DEBUG oslo_vmware.api [None req-fe85e90a-cdb0-46ca-8d6a-8fc6711e283d tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1018036, 'name': CreateSnapshot_Task, 'duration_secs': 0.450005} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1252.715747] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-fe85e90a-cdb0-46ca-8d6a-8fc6711e283d tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Created Snapshot of the VM instance {{(pid=63345) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1252.716917] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ee42f83-bd0b-430d-8105-5c556396afbf {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.998014] env[63345]: DEBUG nova.scheduler.client.report [None req-43fb7a55-172f-4e48-9811-16746b555530 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1253.237830] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-fe85e90a-cdb0-46ca-8d6a-8fc6711e283d tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Creating linked-clone VM from snapshot {{(pid=63345) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1253.238151] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-a2582725-5a9a-4b61-8974-675758ba41c4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.246548] env[63345]: DEBUG oslo_vmware.api [None req-fe85e90a-cdb0-46ca-8d6a-8fc6711e283d tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 1253.246548] env[63345]: value = "task-1018037" [ 1253.246548] env[63345]: _type = "Task" [ 1253.246548] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1253.254283] env[63345]: DEBUG oslo_vmware.api [None req-fe85e90a-cdb0-46ca-8d6a-8fc6711e283d tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1018037, 'name': CloneVM_Task} progress is 0%. 
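The scheduler report line just above carries the provider inventory verbatim. Placement derives the schedulable capacity of each resource class as (total - reserved) * allocation_ratio, with max_unit additionally capping any single request. A worked example using the figures reported for provider fc35ddde-... (the arithmetic is illustrative, not log output):

```python
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: schedulable capacity = {capacity}")
# VCPU: 192.0, MEMORY_MB: 196078.0, DISK_GB: 400.0
```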
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.502514] env[63345]: DEBUG oslo_concurrency.lockutils [None req-43fb7a55-172f-4e48-9811-16746b555530 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.625s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1253.524863] env[63345]: INFO nova.scheduler.client.report [None req-43fb7a55-172f-4e48-9811-16746b555530 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Deleted allocations for instance 83ef21e9-62eb-4f0d-9c0c-a038743e0dd8 [ 1253.756811] env[63345]: DEBUG oslo_vmware.api [None req-fe85e90a-cdb0-46ca-8d6a-8fc6711e283d tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1018037, 'name': CloneVM_Task} progress is 94%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.032326] env[63345]: DEBUG oslo_concurrency.lockutils [None req-43fb7a55-172f-4e48-9811-16746b555530 tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Lock "83ef21e9-62eb-4f0d-9c0c-a038743e0dd8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.511s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1254.258313] env[63345]: DEBUG oslo_vmware.api [None req-fe85e90a-cdb0-46ca-8d6a-8fc6711e283d tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1018037, 'name': CloneVM_Task, 'duration_secs': 0.956323} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1254.258581] env[63345]: INFO nova.virt.vmwareapi.vmops [None req-fe85e90a-cdb0-46ca-8d6a-8fc6711e283d tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Created linked-clone VM from snapshot [ 1254.259325] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-965e3200-f189-4e2d-84d4-f9495124ae60 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.266422] env[63345]: DEBUG nova.virt.vmwareapi.images [None req-fe85e90a-cdb0-46ca-8d6a-8fc6711e283d tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Uploading image e478039f-650f-4882-b1e8-a69bb1f9adf4 {{(pid=63345) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:442}} [ 1254.289586] env[63345]: DEBUG oslo_vmware.rw_handles [None req-fe85e90a-cdb0-46ca-8d6a-8fc6711e283d tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1254.289586] env[63345]: value = "vm-226190" [ 1254.289586] env[63345]: _type = "VirtualMachine" [ 1254.289586] env[63345]: }. 
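The shelve flow above takes a snapshot and then builds a linked clone from it (CreateSnapshot_Task followed by CloneVM_Task) so the upload can work from a space-efficient copy. A sketch of how such a clone spec is assembled, assuming the suds-style client factory oslo.vmware exposes; everything other than the VIM type and field names is illustrative:

```python
# Sketch of the "Creating linked-clone VM from snapshot" step: the clone spec
# points at the snapshot and uses the child-disk backing so the clone shares
# the parent's disks instead of copying them.
def linked_clone(session, vm_ref, snapshot_ref, folder_ref, name):
    cf = session.vim.client.factory
    relocate = cf.create('ns0:VirtualMachineRelocateSpec')
    relocate.diskMoveType = 'createNewChildDiskBacking'

    clone_spec = cf.create('ns0:VirtualMachineCloneSpec')
    clone_spec.location = relocate
    clone_spec.snapshot = snapshot_ref
    clone_spec.powerOn = False
    clone_spec.template = False

    task = session.invoke_api(session.vim, 'CloneVM_Task', vm_ref,
                              folder=folder_ref, name=name, spec=clone_spec)
    session.wait_for_task(task)
```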
{{(pid=63345) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1254.289856] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-bb916fb7-e363-4c28-a86b-008f13041ae8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.296904] env[63345]: DEBUG oslo_vmware.rw_handles [None req-fe85e90a-cdb0-46ca-8d6a-8fc6711e283d tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lease: (returnval){ [ 1254.296904] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52eb63d1-13f2-7bee-f606-2f279954c5ac" [ 1254.296904] env[63345]: _type = "HttpNfcLease" [ 1254.296904] env[63345]: } obtained for exporting VM: (result){ [ 1254.296904] env[63345]: value = "vm-226190" [ 1254.296904] env[63345]: _type = "VirtualMachine" [ 1254.296904] env[63345]: }. {{(pid=63345) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1254.297244] env[63345]: DEBUG oslo_vmware.api [None req-fe85e90a-cdb0-46ca-8d6a-8fc6711e283d tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the lease: (returnval){ [ 1254.297244] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52eb63d1-13f2-7bee-f606-2f279954c5ac" [ 1254.297244] env[63345]: _type = "HttpNfcLease" [ 1254.297244] env[63345]: } to be ready. {{(pid=63345) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1254.302858] env[63345]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1254.302858] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52eb63d1-13f2-7bee-f606-2f279954c5ac" [ 1254.302858] env[63345]: _type = "HttpNfcLease" [ 1254.302858] env[63345]: } is initializing. {{(pid=63345) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1254.805265] env[63345]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1254.805265] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52eb63d1-13f2-7bee-f606-2f279954c5ac" [ 1254.805265] env[63345]: _type = "HttpNfcLease" [ 1254.805265] env[63345]: } is ready. {{(pid=63345) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1254.805625] env[63345]: DEBUG oslo_vmware.rw_handles [None req-fe85e90a-cdb0-46ca-8d6a-8fc6711e283d tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1254.805625] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52eb63d1-13f2-7bee-f606-2f279954c5ac" [ 1254.805625] env[63345]: _type = "HttpNfcLease" [ 1254.805625] env[63345]: }. 
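The surrounding lines show the image-export handshake: ExportVm hands back an HttpNfcLease, the session waits for it to become ready, and the VMDK is then read over the URL the lease advertises (oslo.vmware's rw_handles does the streaming and the HttpNfcLeaseProgress keep-alives in the real code). A sketch under the same assumptions as before, with a plain `requests` download standing in for the real read handle:

```python
import requests

from oslo_vmware import vim_util


def open_exported_vmdk(session, vm_ref):
    lease = session.invoke_api(session.vim, 'ExportVm', vm_ref)
    # Blocks through the "is initializing" / "is ready" polling seen above.
    session.wait_for_lease_ready(lease)

    # HttpNfcLeaseInfo.deviceUrl lists one entry per exported disk; its url is
    # what the log reports as "Found VMDK URL: ...".
    lease_info = session.invoke_api(vim_util, 'get_object_property',
                                    session.vim, lease, 'info')
    vmdk_url = lease_info.deviceUrl[0].url

    # Illustrative download only; the production path keeps the lease alive
    # while bytes are transferred.
    return requests.get(vmdk_url, stream=True, verify=False)
```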
{{(pid=63345) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1254.806348] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc650780-479e-42ac-8163-4ee126305ec9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.813344] env[63345]: DEBUG oslo_vmware.rw_handles [None req-fe85e90a-cdb0-46ca-8d6a-8fc6711e283d tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/522eeb9f-e52a-e19d-33c9-f20a17fb918b/disk-0.vmdk from lease info. {{(pid=63345) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1254.813527] env[63345]: DEBUG oslo_vmware.rw_handles [None req-fe85e90a-cdb0-46ca-8d6a-8fc6711e283d tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/522eeb9f-e52a-e19d-33c9-f20a17fb918b/disk-0.vmdk for reading. {{(pid=63345) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1254.898024] env[63345]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-42a8ac7c-b6b8-45e6-90fa-48931559fe06 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.163687] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4f318f54-4894-422a-9149-b47e16ee0e6f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Acquiring lock "148c961e-d260-4dbd-ad9f-52f94b072096" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1255.164141] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4f318f54-4894-422a-9149-b47e16ee0e6f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Lock "148c961e-d260-4dbd-ad9f-52f94b072096" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1255.164332] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4f318f54-4894-422a-9149-b47e16ee0e6f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Acquiring lock "148c961e-d260-4dbd-ad9f-52f94b072096-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1255.164530] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4f318f54-4894-422a-9149-b47e16ee0e6f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Lock "148c961e-d260-4dbd-ad9f-52f94b072096-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1255.164707] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4f318f54-4894-422a-9149-b47e16ee0e6f 
tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Lock "148c961e-d260-4dbd-ad9f-52f94b072096-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1255.166991] env[63345]: INFO nova.compute.manager [None req-4f318f54-4894-422a-9149-b47e16ee0e6f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] Terminating instance [ 1255.671101] env[63345]: DEBUG nova.compute.manager [None req-4f318f54-4894-422a-9149-b47e16ee0e6f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] Start destroying the instance on the hypervisor. {{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 1255.671506] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-4f318f54-4894-422a-9149-b47e16ee0e6f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1255.672485] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-790439fa-0946-42a6-b5f9-801ee9e51ced {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.681031] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f318f54-4894-422a-9149-b47e16ee0e6f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1255.681359] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5e0d67c3-88dd-4db1-a0f1-d5c0cfcb8017 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.687578] env[63345]: DEBUG oslo_vmware.api [None req-4f318f54-4894-422a-9149-b47e16ee0e6f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Waiting for the task: (returnval){ [ 1255.687578] env[63345]: value = "task-1018039" [ 1255.687578] env[63345]: _type = "Task" [ 1255.687578] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1255.695750] env[63345]: DEBUG oslo_vmware.api [None req-4f318f54-4894-422a-9149-b47e16ee0e6f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1018039, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.198518] env[63345]: DEBUG oslo_vmware.api [None req-4f318f54-4894-422a-9149-b47e16ee0e6f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1018039, 'name': PowerOffVM_Task, 'duration_secs': 0.256741} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1256.198993] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f318f54-4894-422a-9149-b47e16ee0e6f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1256.199338] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-4f318f54-4894-422a-9149-b47e16ee0e6f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1256.199678] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-510c731b-65e2-4ffe-9ba9-0ca02fe5ec07 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.286834] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-4f318f54-4894-422a-9149-b47e16ee0e6f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1256.287219] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-4f318f54-4894-422a-9149-b47e16ee0e6f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1256.287530] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f318f54-4894-422a-9149-b47e16ee0e6f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Deleting the datastore file [datastore2] 148c961e-d260-4dbd-ad9f-52f94b072096 {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1256.287905] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0140a5e0-5540-4497-a88f-818ca94b37bb {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.294363] env[63345]: DEBUG oslo_vmware.api [None req-4f318f54-4894-422a-9149-b47e16ee0e6f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Waiting for the task: (returnval){ [ 1256.294363] env[63345]: value = "task-1018041" [ 1256.294363] env[63345]: _type = "Task" [ 1256.294363] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1256.302125] env[63345]: DEBUG oslo_vmware.api [None req-4f318f54-4894-422a-9149-b47e16ee0e6f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1018041, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.804450] env[63345]: DEBUG oslo_vmware.api [None req-4f318f54-4894-422a-9149-b47e16ee0e6f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Task: {'id': task-1018041, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.163376} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1256.804785] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f318f54-4894-422a-9149-b47e16ee0e6f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1256.805015] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-4f318f54-4894-422a-9149-b47e16ee0e6f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1256.805214] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-4f318f54-4894-422a-9149-b47e16ee0e6f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1256.805395] env[63345]: INFO nova.compute.manager [None req-4f318f54-4894-422a-9149-b47e16ee0e6f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1256.805675] env[63345]: DEBUG oslo.service.loopingcall [None req-4f318f54-4894-422a-9149-b47e16ee0e6f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1256.805887] env[63345]: DEBUG nova.compute.manager [-] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 1256.805982] env[63345]: DEBUG nova.network.neutron [-] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1257.084872] env[63345]: DEBUG nova.compute.manager [req-a7dc05a1-5026-4b4a-ae6b-a34fdb7ad26f req-de7f012b-0796-408a-bb37-a870a2ac1f4b service nova] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] Received event network-vif-deleted-f2837ec1-0df3-454a-bc68-fb0ca9562eb4 {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1257.085137] env[63345]: INFO nova.compute.manager [req-a7dc05a1-5026-4b4a-ae6b-a34fdb7ad26f req-de7f012b-0796-408a-bb37-a870a2ac1f4b service nova] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] Neutron deleted interface f2837ec1-0df3-454a-bc68-fb0ca9562eb4; detaching it from the instance and deleting it from the info cache [ 1257.085355] env[63345]: DEBUG nova.network.neutron [req-a7dc05a1-5026-4b4a-ae6b-a34fdb7ad26f req-de7f012b-0796-408a-bb37-a870a2ac1f4b service nova] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1257.560646] env[63345]: DEBUG nova.network.neutron [-] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1257.588853] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7f9e16ff-a36a-4978-b846-25909b228fc2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.598625] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72e8c0b9-5f90-4812-b2b5-2a77512464c9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.622774] env[63345]: DEBUG nova.compute.manager [req-a7dc05a1-5026-4b4a-ae6b-a34fdb7ad26f req-de7f012b-0796-408a-bb37-a870a2ac1f4b service nova] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] Detach interface failed, port_id=f2837ec1-0df3-454a-bc68-fb0ca9562eb4, reason: Instance 148c961e-d260-4dbd-ad9f-52f94b072096 could not be found. {{(pid=63345) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 1258.064036] env[63345]: INFO nova.compute.manager [-] [instance: 148c961e-d260-4dbd-ad9f-52f94b072096] Took 1.26 seconds to deallocate network for instance. 
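The entries above trace the hypervisor-side teardown of instance 148c961e-d260-4dbd-ad9f-52f94b072096: each vCenter operation (PowerOffVM_Task, DeleteDatastoreFile_Task) is submitted, then polled via wait_for_task until it reports completion, after which network deallocation runs. As a rough, hedged illustration of that submit-then-poll pattern only (not the actual oslo.vmware or Nova code; the helper names below are hypothetical placeholders), a sketch could look like:

```python
# Hypothetical sketch of the submit-then-poll loop reflected by the
# "Waiting for the task" / "progress is 0%" / "completed successfully"
# log entries above. get_task_info() is a placeholder callback, not a
# real oslo.vmware or Nova API.
import time


class TaskFailed(Exception):
    """Raised when the backend reports an error state for a task."""


def wait_for_task(get_task_info, task_id, poll_interval=0.5, timeout=300):
    """Poll a task until it reaches a terminal state.

    get_task_info(task_id) is assumed to return a dict with keys
    'state' ('running' | 'success' | 'error') and 'progress' (0-100).
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info(task_id)  # e.g. a property fetch on the task object
        if info["state"] == "success":
            return info
        if info["state"] == "error":
            raise TaskFailed(f"task {task_id} failed: {info.get('error')}")
        # Still running: log progress and sleep, mirroring the _poll_task lines.
        print(f"Task {task_id} progress is {info['progress']}%.")
        time.sleep(poll_interval)
    raise TimeoutError(f"task {task_id} did not complete within {timeout}s")
```

In the log, each poll corresponds to one `_poll_task` line, and the final line for a task carries `duration_secs` once it reaches a terminal state.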
[ 1258.571333] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4f318f54-4894-422a-9149-b47e16ee0e6f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1258.571813] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4f318f54-4894-422a-9149-b47e16ee0e6f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1258.571935] env[63345]: DEBUG nova.objects.instance [None req-4f318f54-4894-422a-9149-b47e16ee0e6f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Lazy-loading 'resources' on Instance uuid 148c961e-d260-4dbd-ad9f-52f94b072096 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1259.125304] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe1295f9-fae0-45a6-acfb-79e7877bfe45 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.131414] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1319ac5-4682-43a8-a89a-6be2f99b30f8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.161790] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8368d16-7b2b-4a1b-b91a-85a210c58555 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.169387] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32020fde-aea7-4890-8600-99d7a3ba79ee {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.184087] env[63345]: DEBUG nova.compute.provider_tree [None req-4f318f54-4894-422a-9149-b47e16ee0e6f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1259.687615] env[63345]: DEBUG nova.scheduler.client.report [None req-4f318f54-4894-422a-9149-b47e16ee0e6f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1260.192142] env[63345]: DEBUG oslo_concurrency.lockutils [None 
req-4f318f54-4894-422a-9149-b47e16ee0e6f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.620s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1260.210875] env[63345]: INFO nova.scheduler.client.report [None req-4f318f54-4894-422a-9149-b47e16ee0e6f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Deleted allocations for instance 148c961e-d260-4dbd-ad9f-52f94b072096 [ 1260.718311] env[63345]: DEBUG oslo_concurrency.lockutils [None req-4f318f54-4894-422a-9149-b47e16ee0e6f tempest-ServerRescueNegativeTestJSON-736059177 tempest-ServerRescueNegativeTestJSON-736059177-project-member] Lock "148c961e-d260-4dbd-ad9f-52f94b072096" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.554s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1262.855164] env[63345]: DEBUG oslo_vmware.rw_handles [None req-fe85e90a-cdb0-46ca-8d6a-8fc6711e283d tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/522eeb9f-e52a-e19d-33c9-f20a17fb918b/disk-0.vmdk. {{(pid=63345) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1262.856188] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-031081eb-02b5-4750-9636-09594741a70d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.863143] env[63345]: DEBUG oslo_vmware.rw_handles [None req-fe85e90a-cdb0-46ca-8d6a-8fc6711e283d tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/522eeb9f-e52a-e19d-33c9-f20a17fb918b/disk-0.vmdk is in state: ready. {{(pid=63345) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1262.863332] env[63345]: ERROR oslo_vmware.rw_handles [None req-fe85e90a-cdb0-46ca-8d6a-8fc6711e283d tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/522eeb9f-e52a-e19d-33c9-f20a17fb918b/disk-0.vmdk due to incomplete transfer. [ 1262.863561] env[63345]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-d615127f-098c-4bcf-857f-03c3e1c11909 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.871187] env[63345]: DEBUG oslo_vmware.rw_handles [None req-fe85e90a-cdb0-46ca-8d6a-8fc6711e283d tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/522eeb9f-e52a-e19d-33c9-f20a17fb918b/disk-0.vmdk. 
{{(pid=63345) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1262.871402] env[63345]: DEBUG nova.virt.vmwareapi.images [None req-fe85e90a-cdb0-46ca-8d6a-8fc6711e283d tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Uploaded image e478039f-650f-4882-b1e8-a69bb1f9adf4 to the Glance image server {{(pid=63345) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:473}} [ 1262.873544] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe85e90a-cdb0-46ca-8d6a-8fc6711e283d tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Destroying the VM {{(pid=63345) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1358}} [ 1262.873785] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-c859b57f-f110-4ed7-b02d-d92b7564091f {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.879413] env[63345]: DEBUG oslo_vmware.api [None req-fe85e90a-cdb0-46ca-8d6a-8fc6711e283d tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 1262.879413] env[63345]: value = "task-1018042" [ 1262.879413] env[63345]: _type = "Task" [ 1262.879413] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1262.888277] env[63345]: DEBUG oslo_vmware.api [None req-fe85e90a-cdb0-46ca-8d6a-8fc6711e283d tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1018042, 'name': Destroy_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1263.388453] env[63345]: DEBUG oslo_vmware.api [None req-fe85e90a-cdb0-46ca-8d6a-8fc6711e283d tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1018042, 'name': Destroy_Task, 'duration_secs': 0.374495} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1263.388757] env[63345]: INFO nova.virt.vmwareapi.vm_util [None req-fe85e90a-cdb0-46ca-8d6a-8fc6711e283d tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Destroyed the VM [ 1263.389017] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-fe85e90a-cdb0-46ca-8d6a-8fc6711e283d tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Deleting Snapshot of the VM instance {{(pid=63345) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1263.389294] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-ca34f5e7-6b8a-4638-9baa-94aa6dc1ff44 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.395784] env[63345]: DEBUG oslo_vmware.api [None req-fe85e90a-cdb0-46ca-8d6a-8fc6711e283d tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 1263.395784] env[63345]: value = "task-1018043" [ 1263.395784] env[63345]: _type = "Task" [ 1263.395784] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1263.403637] env[63345]: DEBUG oslo_vmware.api [None req-fe85e90a-cdb0-46ca-8d6a-8fc6711e283d tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1018043, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1263.905560] env[63345]: DEBUG oslo_vmware.api [None req-fe85e90a-cdb0-46ca-8d6a-8fc6711e283d tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1018043, 'name': RemoveSnapshot_Task, 'duration_secs': 0.359048} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1263.905949] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-fe85e90a-cdb0-46ca-8d6a-8fc6711e283d tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Deleted Snapshot of the VM instance {{(pid=63345) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1263.906093] env[63345]: DEBUG nova.compute.manager [None req-fe85e90a-cdb0-46ca-8d6a-8fc6711e283d tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1263.906880] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b53204f-9508-4f7b-97a6-40b114cfcc7d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.419124] env[63345]: INFO nova.compute.manager [None req-fe85e90a-cdb0-46ca-8d6a-8fc6711e283d tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Shelve offloading [ 1264.923195] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe85e90a-cdb0-46ca-8d6a-8fc6711e283d tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1264.923502] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0c5e76de-fe0e-409f-8a3e-52de0ad6f28c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.931793] env[63345]: DEBUG oslo_vmware.api [None req-fe85e90a-cdb0-46ca-8d6a-8fc6711e283d tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 1264.931793] env[63345]: value = "task-1018044" [ 1264.931793] env[63345]: _type = "Task" [ 1264.931793] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1264.941216] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe85e90a-cdb0-46ca-8d6a-8fc6711e283d tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] VM already powered off {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 1264.941424] env[63345]: DEBUG nova.compute.manager [None req-fe85e90a-cdb0-46ca-8d6a-8fc6711e283d tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1264.942139] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f3986d5-1904-42fb-b553-37872b31ec04 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.947435] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fe85e90a-cdb0-46ca-8d6a-8fc6711e283d tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Acquiring lock "refresh_cache-53f759e4-0398-40ef-823a-3028d1ac82b1" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1264.947609] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fe85e90a-cdb0-46ca-8d6a-8fc6711e283d tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Acquired lock "refresh_cache-53f759e4-0398-40ef-823a-3028d1ac82b1" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1264.947822] env[63345]: DEBUG nova.network.neutron [None req-fe85e90a-cdb0-46ca-8d6a-8fc6711e283d tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1265.650585] env[63345]: DEBUG nova.network.neutron [None req-fe85e90a-cdb0-46ca-8d6a-8fc6711e283d tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Updating instance_info_cache with network_info: [{"id": "4b3221f1-3c39-4726-b760-339e16d0d89e", "address": "fa:16:3e:74:52:98", "network": {"id": "dffa0b34-9323-42eb-aeb1-e32aebcb75c8", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1826417035-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.227", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "57e386920081487583ea143003aca8c4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "94e1d797-8eb2-4400-9f7d-f2eb60eb4cf2", "external-id": "nsx-vlan-transportzone-828", "segmentation_id": 828, "bound_drivers": {"0": "nsxv3"}}, "devname": 
"tap4b3221f1-3c", "ovs_interfaceid": "4b3221f1-3c39-4726-b760-339e16d0d89e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1266.153594] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fe85e90a-cdb0-46ca-8d6a-8fc6711e283d tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Releasing lock "refresh_cache-53f759e4-0398-40ef-823a-3028d1ac82b1" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1266.380186] env[63345]: DEBUG nova.compute.manager [req-58ca1037-2c85-408f-9d33-f11a70cc6ad4 req-88fdb8fe-0564-44d1-b196-fa7cff08ea4a service nova] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Received event network-vif-unplugged-4b3221f1-3c39-4726-b760-339e16d0d89e {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1266.380535] env[63345]: DEBUG oslo_concurrency.lockutils [req-58ca1037-2c85-408f-9d33-f11a70cc6ad4 req-88fdb8fe-0564-44d1-b196-fa7cff08ea4a service nova] Acquiring lock "53f759e4-0398-40ef-823a-3028d1ac82b1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1266.380651] env[63345]: DEBUG oslo_concurrency.lockutils [req-58ca1037-2c85-408f-9d33-f11a70cc6ad4 req-88fdb8fe-0564-44d1-b196-fa7cff08ea4a service nova] Lock "53f759e4-0398-40ef-823a-3028d1ac82b1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1266.380826] env[63345]: DEBUG oslo_concurrency.lockutils [req-58ca1037-2c85-408f-9d33-f11a70cc6ad4 req-88fdb8fe-0564-44d1-b196-fa7cff08ea4a service nova] Lock "53f759e4-0398-40ef-823a-3028d1ac82b1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1266.380998] env[63345]: DEBUG nova.compute.manager [req-58ca1037-2c85-408f-9d33-f11a70cc6ad4 req-88fdb8fe-0564-44d1-b196-fa7cff08ea4a service nova] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] No waiting events found dispatching network-vif-unplugged-4b3221f1-3c39-4726-b760-339e16d0d89e {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1266.381353] env[63345]: WARNING nova.compute.manager [req-58ca1037-2c85-408f-9d33-f11a70cc6ad4 req-88fdb8fe-0564-44d1-b196-fa7cff08ea4a service nova] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Received unexpected event network-vif-unplugged-4b3221f1-3c39-4726-b760-339e16d0d89e for instance with vm_state shelved and task_state shelving_offloading. 
[ 1266.472771] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-fe85e90a-cdb0-46ca-8d6a-8fc6711e283d tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1266.474123] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b946cf0-d838-4b68-aba2-42fc019b86ae {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.482771] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-fe85e90a-cdb0-46ca-8d6a-8fc6711e283d tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1266.483032] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-29d78dcd-d5ec-49f5-b83b-447a81d85421 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.562013] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-fe85e90a-cdb0-46ca-8d6a-8fc6711e283d tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1266.562283] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-fe85e90a-cdb0-46ca-8d6a-8fc6711e283d tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1266.562478] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe85e90a-cdb0-46ca-8d6a-8fc6711e283d tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Deleting the datastore file [datastore2] 53f759e4-0398-40ef-823a-3028d1ac82b1 {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1266.562744] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a0987dd0-a503-4f78-a043-4ba614e11732 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.570215] env[63345]: DEBUG oslo_vmware.api [None req-fe85e90a-cdb0-46ca-8d6a-8fc6711e283d tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 1266.570215] env[63345]: value = "task-1018046" [ 1266.570215] env[63345]: _type = "Task" [ 1266.570215] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1266.577593] env[63345]: DEBUG oslo_vmware.api [None req-fe85e90a-cdb0-46ca-8d6a-8fc6711e283d tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1018046, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1267.080044] env[63345]: DEBUG oslo_vmware.api [None req-fe85e90a-cdb0-46ca-8d6a-8fc6711e283d tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1018046, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.149343} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1267.080317] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe85e90a-cdb0-46ca-8d6a-8fc6711e283d tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1267.080513] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-fe85e90a-cdb0-46ca-8d6a-8fc6711e283d tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1267.080695] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-fe85e90a-cdb0-46ca-8d6a-8fc6711e283d tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1267.105303] env[63345]: INFO nova.scheduler.client.report [None req-fe85e90a-cdb0-46ca-8d6a-8fc6711e283d tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Deleted allocations for instance 53f759e4-0398-40ef-823a-3028d1ac82b1 [ 1267.609621] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fe85e90a-cdb0-46ca-8d6a-8fc6711e283d tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1267.609921] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fe85e90a-cdb0-46ca-8d6a-8fc6711e283d tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1267.610149] env[63345]: DEBUG nova.objects.instance [None req-fe85e90a-cdb0-46ca-8d6a-8fc6711e283d tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lazy-loading 'resources' on Instance uuid 53f759e4-0398-40ef-823a-3028d1ac82b1 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1268.113052] env[63345]: DEBUG nova.objects.instance [None req-fe85e90a-cdb0-46ca-8d6a-8fc6711e283d tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lazy-loading 'numa_topology' on Instance uuid 53f759e4-0398-40ef-823a-3028d1ac82b1 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1268.406336] env[63345]: DEBUG nova.compute.manager [req-688962c1-4d0e-409a-9e94-7f8d0d6e49b0 req-2309aff4-7a13-43a3-8a2f-4cacceeccc92 
service nova] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Received event network-changed-4b3221f1-3c39-4726-b760-339e16d0d89e {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1268.406522] env[63345]: DEBUG nova.compute.manager [req-688962c1-4d0e-409a-9e94-7f8d0d6e49b0 req-2309aff4-7a13-43a3-8a2f-4cacceeccc92 service nova] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Refreshing instance network info cache due to event network-changed-4b3221f1-3c39-4726-b760-339e16d0d89e. {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 1268.406746] env[63345]: DEBUG oslo_concurrency.lockutils [req-688962c1-4d0e-409a-9e94-7f8d0d6e49b0 req-2309aff4-7a13-43a3-8a2f-4cacceeccc92 service nova] Acquiring lock "refresh_cache-53f759e4-0398-40ef-823a-3028d1ac82b1" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1268.406922] env[63345]: DEBUG oslo_concurrency.lockutils [req-688962c1-4d0e-409a-9e94-7f8d0d6e49b0 req-2309aff4-7a13-43a3-8a2f-4cacceeccc92 service nova] Acquired lock "refresh_cache-53f759e4-0398-40ef-823a-3028d1ac82b1" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1268.407110] env[63345]: DEBUG nova.network.neutron [req-688962c1-4d0e-409a-9e94-7f8d0d6e49b0 req-2309aff4-7a13-43a3-8a2f-4cacceeccc92 service nova] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Refreshing network info cache for port 4b3221f1-3c39-4726-b760-339e16d0d89e {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1268.615677] env[63345]: DEBUG nova.objects.base [None req-fe85e90a-cdb0-46ca-8d6a-8fc6711e283d tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Object Instance<53f759e4-0398-40ef-823a-3028d1ac82b1> lazy-loaded attributes: resources,numa_topology {{(pid=63345) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 1268.641009] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e8eba01-6b46-4b6b-9271-08c171a9e7a4 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.648467] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8f0ecac-0f84-4e69-9db1-987a44cfafc8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.678567] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aebec106-1cdb-4fbc-acf9-63bb86461e2a {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.685369] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4f9382e-5b7d-433a-b0d4-38858ee5d036 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.698161] env[63345]: DEBUG nova.compute.provider_tree [None req-fe85e90a-cdb0-46ca-8d6a-8fc6711e283d tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1269.025822] env[63345]: DEBUG oslo_concurrency.lockutils [None 
req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Acquiring lock "53f759e4-0398-40ef-823a-3028d1ac82b1" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1269.124694] env[63345]: DEBUG nova.network.neutron [req-688962c1-4d0e-409a-9e94-7f8d0d6e49b0 req-2309aff4-7a13-43a3-8a2f-4cacceeccc92 service nova] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Updated VIF entry in instance network info cache for port 4b3221f1-3c39-4726-b760-339e16d0d89e. {{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1269.125063] env[63345]: DEBUG nova.network.neutron [req-688962c1-4d0e-409a-9e94-7f8d0d6e49b0 req-2309aff4-7a13-43a3-8a2f-4cacceeccc92 service nova] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Updating instance_info_cache with network_info: [{"id": "4b3221f1-3c39-4726-b760-339e16d0d89e", "address": "fa:16:3e:74:52:98", "network": {"id": "dffa0b34-9323-42eb-aeb1-e32aebcb75c8", "bridge": null, "label": "tempest-ServerActionsTestOtherB-1826417035-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.227", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "57e386920081487583ea143003aca8c4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap4b3221f1-3c", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1269.200862] env[63345]: DEBUG nova.scheduler.client.report [None req-fe85e90a-cdb0-46ca-8d6a-8fc6711e283d tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1269.628236] env[63345]: DEBUG oslo_concurrency.lockutils [req-688962c1-4d0e-409a-9e94-7f8d0d6e49b0 req-2309aff4-7a13-43a3-8a2f-4cacceeccc92 service nova] Releasing lock "refresh_cache-53f759e4-0398-40ef-823a-3028d1ac82b1" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1269.705485] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fe85e90a-cdb0-46ca-8d6a-8fc6711e283d tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.095s {{(pid=63345) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1270.213197] env[63345]: DEBUG oslo_concurrency.lockutils [None req-fe85e90a-cdb0-46ca-8d6a-8fc6711e283d tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lock "53f759e4-0398-40ef-823a-3028d1ac82b1" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 20.075s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1270.213967] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lock "53f759e4-0398-40ef-823a-3028d1ac82b1" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 1.188s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1270.214179] env[63345]: INFO nova.compute.manager [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Unshelving [ 1270.262266] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1270.262472] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1270.262622] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Starting heal instance info cache {{(pid=63345) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10257}} [ 1270.765248] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Didn't find any instances for network info cache update. 
{{(pid=63345) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10343}} [ 1270.765706] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1270.765706] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1270.765813] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1270.765933] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1270.766099] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1270.766251] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1270.766384] env[63345]: DEBUG nova.compute.manager [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63345) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10876}} [ 1270.766530] env[63345]: DEBUG oslo_service.periodic_task [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Running periodic task ComputeManager.update_available_resource {{(pid=63345) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1271.237304] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1271.237597] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1271.237830] env[63345]: DEBUG nova.objects.instance [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lazy-loading 'pci_requests' on Instance uuid 53f759e4-0398-40ef-823a-3028d1ac82b1 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1271.269283] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1271.742864] env[63345]: DEBUG nova.objects.instance [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lazy-loading 'numa_topology' on Instance uuid 53f759e4-0398-40ef-823a-3028d1ac82b1 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1272.246142] env[63345]: INFO nova.compute.claims [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1273.280616] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-293a92f5-0fd8-4816-b7b6-cc4c0c87e1e7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.288956] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88ac36d0-fac5-4411-b1e0-9191320c80e3 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.317735] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e2fe14f-9280-4358-b863-23374afefec0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.324833] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-e03c0889-6349-48ce-8e48-165895336b21 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.337797] env[63345]: DEBUG nova.compute.provider_tree [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1273.840870] env[63345]: DEBUG nova.scheduler.client.report [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1274.347425] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.110s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1274.349584] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 3.080s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1274.349770] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1274.349931] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63345) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1274.351185] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-918b12f5-ca00-42c9-a816-419977944519 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.359063] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-057fb706-5033-4461-bb80-dccfefcef7d5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.372422] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62d16c58-2039-4181-9d85-efcf4067c062 {{(pid=63345) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.378543] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22c2642f-d590-4c08-a460-2dd01b8a6206 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.381933] env[63345]: INFO nova.network.neutron [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Updating port 4b3221f1-3c39-4726-b760-339e16d0d89e with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1274.408796] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180734MB free_disk=187GB free_vcpus=48 pci_devices=None {{(pid=63345) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1274.408956] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1274.409189] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1275.430780] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Instance 53f759e4-0398-40ef-823a-3028d1ac82b1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63345) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1275.431085] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=63345) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1275.431169] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=63345) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1275.454798] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07b20af1-eef0-425f-b099-467f92ed6e2c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.462586] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-903c1b2c-b7f5-45ae-b330-29dc466f67b0 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.492743] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81bc1796-18da-4bdc-881a-479e35cb1775 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.499780] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c65fe8ff-4e12-418f-a141-abda14f739d5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.512469] env[63345]: DEBUG nova.compute.provider_tree [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1276.015576] env[63345]: DEBUG nova.scheduler.client.report [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1276.266469] env[63345]: DEBUG nova.compute.manager [req-b4c61736-d355-43b0-b58e-ca2685cb8d39 req-54abb3cf-dae8-42d6-8147-347a5427c06d service nova] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Received event network-vif-plugged-4b3221f1-3c39-4726-b760-339e16d0d89e {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1276.267103] env[63345]: DEBUG oslo_concurrency.lockutils [req-b4c61736-d355-43b0-b58e-ca2685cb8d39 req-54abb3cf-dae8-42d6-8147-347a5427c06d service nova] Acquiring lock "53f759e4-0398-40ef-823a-3028d1ac82b1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63345) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1276.267380] env[63345]: DEBUG oslo_concurrency.lockutils [req-b4c61736-d355-43b0-b58e-ca2685cb8d39 req-54abb3cf-dae8-42d6-8147-347a5427c06d service nova] Lock "53f759e4-0398-40ef-823a-3028d1ac82b1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1276.267601] env[63345]: DEBUG oslo_concurrency.lockutils [req-b4c61736-d355-43b0-b58e-ca2685cb8d39 req-54abb3cf-dae8-42d6-8147-347a5427c06d service nova] Lock "53f759e4-0398-40ef-823a-3028d1ac82b1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1276.267805] env[63345]: DEBUG nova.compute.manager [req-b4c61736-d355-43b0-b58e-ca2685cb8d39 req-54abb3cf-dae8-42d6-8147-347a5427c06d service nova] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] No waiting events found dispatching network-vif-plugged-4b3221f1-3c39-4726-b760-339e16d0d89e {{(pid=63345) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1276.268030] env[63345]: WARNING nova.compute.manager [req-b4c61736-d355-43b0-b58e-ca2685cb8d39 req-54abb3cf-dae8-42d6-8147-347a5427c06d service nova] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Received unexpected event network-vif-plugged-4b3221f1-3c39-4726-b760-339e16d0d89e for instance with vm_state shelved_offloaded and task_state spawning. [ 1276.351667] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Acquiring lock "refresh_cache-53f759e4-0398-40ef-823a-3028d1ac82b1" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1276.351952] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Acquired lock "refresh_cache-53f759e4-0398-40ef-823a-3028d1ac82b1" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1276.352115] env[63345]: DEBUG nova.network.neutron [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Building network info cache for instance {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1276.520973] env[63345]: DEBUG nova.compute.resource_tracker [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63345) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1276.521414] env[63345]: DEBUG oslo_concurrency.lockutils [None req-90b0b1b2-ec70-403a-804c-6d92601f3e8f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.112s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1277.048728] env[63345]: DEBUG nova.network.neutron [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa 
tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Updating instance_info_cache with network_info: [{"id": "4b3221f1-3c39-4726-b760-339e16d0d89e", "address": "fa:16:3e:74:52:98", "network": {"id": "dffa0b34-9323-42eb-aeb1-e32aebcb75c8", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1826417035-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.227", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "57e386920081487583ea143003aca8c4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "94e1d797-8eb2-4400-9f7d-f2eb60eb4cf2", "external-id": "nsx-vlan-transportzone-828", "segmentation_id": 828, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b3221f1-3c", "ovs_interfaceid": "4b3221f1-3c39-4726-b760-339e16d0d89e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1277.551490] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Releasing lock "refresh_cache-53f759e4-0398-40ef-823a-3028d1ac82b1" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1277.578523] env[63345]: DEBUG nova.virt.hardware [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T09:32:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='ba41e618ac403871c49d690b44b07fa7',container_format='bare',created_at=2024-09-30T09:46:33Z,direct_url=,disk_format='vmdk',id=e478039f-650f-4882-b1e8-a69bb1f9adf4,min_disk=1,min_ram=0,name='tempest-ServerActionsTestOtherB-server-883116747-shelved',owner='57e386920081487583ea143003aca8c4',properties=ImageMetaProps,protected=,size=31668736,status='active',tags=,updated_at=2024-09-30T09:46:47Z,virtual_size=,visibility=), allow threads: False {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1277.578773] env[63345]: DEBUG nova.virt.hardware [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Flavor limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1277.578940] env[63345]: DEBUG nova.virt.hardware [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Image 
limits 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1277.579149] env[63345]: DEBUG nova.virt.hardware [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Flavor pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1277.579305] env[63345]: DEBUG nova.virt.hardware [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Image pref 0:0:0 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1277.579458] env[63345]: DEBUG nova.virt.hardware [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63345) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1277.579672] env[63345]: DEBUG nova.virt.hardware [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1277.579834] env[63345]: DEBUG nova.virt.hardware [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1277.580016] env[63345]: DEBUG nova.virt.hardware [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Got 1 possible topologies {{(pid=63345) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1277.580194] env[63345]: DEBUG nova.virt.hardware [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1277.580370] env[63345]: DEBUG nova.virt.hardware [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63345) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1277.581245] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4f7b985-f4c4-4ab5-957d-096650682bf2 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.588987] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb5b1bc5-783d-40a5-beb7-b262504201bf {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.602792] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa 
tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:74:52:98', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '94e1d797-8eb2-4400-9f7d-f2eb60eb4cf2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4b3221f1-3c39-4726-b760-339e16d0d89e', 'vif_model': 'vmxnet3'}] {{(pid=63345) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1277.610140] env[63345]: DEBUG oslo.service.loopingcall [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1277.610365] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Creating VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1277.610558] env[63345]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-da754625-cf16-49a5-97ad-34183b6267c5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.629401] env[63345]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1277.629401] env[63345]: value = "task-1018047" [ 1277.629401] env[63345]: _type = "Task" [ 1277.629401] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1277.636322] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1018047, 'name': CreateVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1278.139589] env[63345]: DEBUG oslo_vmware.api [-] Task: {'id': task-1018047, 'name': CreateVM_Task, 'duration_secs': 0.328814} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1278.139735] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Created VM on the ESX host {{(pid=63345) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1278.146507] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e478039f-650f-4882-b1e8-a69bb1f9adf4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1278.146684] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e478039f-650f-4882-b1e8-a69bb1f9adf4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1278.147114] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/e478039f-650f-4882-b1e8-a69bb1f9adf4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1278.147362] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9b920dd9-01ff-4882-a4ca-91d4e2da0adb {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.151655] env[63345]: DEBUG oslo_vmware.api [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 1278.151655] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]529b66e2-9235-b3a2-5e36-865c5ad2173f" [ 1278.151655] env[63345]: _type = "Task" [ 1278.151655] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1278.159141] env[63345]: DEBUG oslo_vmware.api [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]529b66e2-9235-b3a2-5e36-865c5ad2173f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1278.289919] env[63345]: DEBUG nova.compute.manager [req-131d1dfb-12ad-4bc3-9b48-512e09d907d0 req-646290af-0774-4277-b409-373f02529568 service nova] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Received event network-changed-4b3221f1-3c39-4726-b760-339e16d0d89e {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1278.290151] env[63345]: DEBUG nova.compute.manager [req-131d1dfb-12ad-4bc3-9b48-512e09d907d0 req-646290af-0774-4277-b409-373f02529568 service nova] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Refreshing instance network info cache due to event network-changed-4b3221f1-3c39-4726-b760-339e16d0d89e. 
{{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 1278.290380] env[63345]: DEBUG oslo_concurrency.lockutils [req-131d1dfb-12ad-4bc3-9b48-512e09d907d0 req-646290af-0774-4277-b409-373f02529568 service nova] Acquiring lock "refresh_cache-53f759e4-0398-40ef-823a-3028d1ac82b1" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1278.290532] env[63345]: DEBUG oslo_concurrency.lockutils [req-131d1dfb-12ad-4bc3-9b48-512e09d907d0 req-646290af-0774-4277-b409-373f02529568 service nova] Acquired lock "refresh_cache-53f759e4-0398-40ef-823a-3028d1ac82b1" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1278.290699] env[63345]: DEBUG nova.network.neutron [req-131d1dfb-12ad-4bc3-9b48-512e09d907d0 req-646290af-0774-4277-b409-373f02529568 service nova] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Refreshing network info cache for port 4b3221f1-3c39-4726-b760-339e16d0d89e {{(pid=63345) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1278.661925] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e478039f-650f-4882-b1e8-a69bb1f9adf4" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1278.662310] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Processing image e478039f-650f-4882-b1e8-a69bb1f9adf4 {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1278.662461] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e478039f-650f-4882-b1e8-a69bb1f9adf4/e478039f-650f-4882-b1e8-a69bb1f9adf4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1278.662632] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e478039f-650f-4882-b1e8-a69bb1f9adf4/e478039f-650f-4882-b1e8-a69bb1f9adf4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1278.662971] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1278.663113] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-39e2b20c-7d58-41fa-9a62-78257d3cb8d5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.680739] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa 
tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1278.680923] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63345) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1278.681628] env[63345]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1c673cf5-95cd-45ed-b290-52a7f4cc6482 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.686563] env[63345]: DEBUG oslo_vmware.api [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 1278.686563] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]52a20777-63b6-6aa7-5c14-87a69ec88e15" [ 1278.686563] env[63345]: _type = "Task" [ 1278.686563] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1278.693691] env[63345]: DEBUG oslo_vmware.api [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': session[52090a46-d3fa-1435-f12f-c4737ae78030]52a20777-63b6-6aa7-5c14-87a69ec88e15, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1278.983801] env[63345]: DEBUG nova.network.neutron [req-131d1dfb-12ad-4bc3-9b48-512e09d907d0 req-646290af-0774-4277-b409-373f02529568 service nova] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Updated VIF entry in instance network info cache for port 4b3221f1-3c39-4726-b760-339e16d0d89e. 
{{(pid=63345) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1278.984277] env[63345]: DEBUG nova.network.neutron [req-131d1dfb-12ad-4bc3-9b48-512e09d907d0 req-646290af-0774-4277-b409-373f02529568 service nova] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Updating instance_info_cache with network_info: [{"id": "4b3221f1-3c39-4726-b760-339e16d0d89e", "address": "fa:16:3e:74:52:98", "network": {"id": "dffa0b34-9323-42eb-aeb1-e32aebcb75c8", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1826417035-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.227", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "57e386920081487583ea143003aca8c4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "94e1d797-8eb2-4400-9f7d-f2eb60eb4cf2", "external-id": "nsx-vlan-transportzone-828", "segmentation_id": 828, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b3221f1-3c", "ovs_interfaceid": "4b3221f1-3c39-4726-b760-339e16d0d89e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1279.196061] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Preparing fetch location {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1279.196364] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Fetch image to [datastore2] OSTACK_IMG_49383081-80b2-4ea4-af13-d0e954c4007f/OSTACK_IMG_49383081-80b2-4ea4-af13-d0e954c4007f.vmdk {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1279.196560] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Downloading stream optimized image e478039f-650f-4882-b1e8-a69bb1f9adf4 to [datastore2] OSTACK_IMG_49383081-80b2-4ea4-af13-d0e954c4007f/OSTACK_IMG_49383081-80b2-4ea4-af13-d0e954c4007f.vmdk on the data store datastore2 as vApp {{(pid=63345) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1279.196739] env[63345]: DEBUG nova.virt.vmwareapi.images [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Downloading image file data e478039f-650f-4882-b1e8-a69bb1f9adf4 to the ESX as VM named 'OSTACK_IMG_49383081-80b2-4ea4-af13-d0e954c4007f' {{(pid=63345) fetch_image_stream_optimized 
/opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1279.260328] env[63345]: DEBUG oslo_vmware.rw_handles [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1279.260328] env[63345]: value = "resgroup-9" [ 1279.260328] env[63345]: _type = "ResourcePool" [ 1279.260328] env[63345]: }. {{(pid=63345) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1279.260604] env[63345]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-f74653cf-e04a-494a-9d0e-d2715dec8580 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.280542] env[63345]: DEBUG oslo_vmware.rw_handles [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lease: (returnval){ [ 1279.280542] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]5285f17e-79d7-f181-1dc9-ccb131f559a5" [ 1279.280542] env[63345]: _type = "HttpNfcLease" [ 1279.280542] env[63345]: } obtained for vApp import into resource pool (val){ [ 1279.280542] env[63345]: value = "resgroup-9" [ 1279.280542] env[63345]: _type = "ResourcePool" [ 1279.280542] env[63345]: }. {{(pid=63345) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1279.280917] env[63345]: DEBUG oslo_vmware.api [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the lease: (returnval){ [ 1279.280917] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]5285f17e-79d7-f181-1dc9-ccb131f559a5" [ 1279.280917] env[63345]: _type = "HttpNfcLease" [ 1279.280917] env[63345]: } to be ready. {{(pid=63345) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1279.287787] env[63345]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1279.287787] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]5285f17e-79d7-f181-1dc9-ccb131f559a5" [ 1279.287787] env[63345]: _type = "HttpNfcLease" [ 1279.287787] env[63345]: } is initializing. {{(pid=63345) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1279.487453] env[63345]: DEBUG oslo_concurrency.lockutils [req-131d1dfb-12ad-4bc3-9b48-512e09d907d0 req-646290af-0774-4277-b409-373f02529568 service nova] Releasing lock "refresh_cache-53f759e4-0398-40ef-823a-3028d1ac82b1" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1279.788596] env[63345]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1279.788596] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]5285f17e-79d7-f181-1dc9-ccb131f559a5" [ 1279.788596] env[63345]: _type = "HttpNfcLease" [ 1279.788596] env[63345]: } is ready. 
{{(pid=63345) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1279.789139] env[63345]: DEBUG oslo_vmware.rw_handles [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1279.789139] env[63345]: value = "session[52090a46-d3fa-1435-f12f-c4737ae78030]5285f17e-79d7-f181-1dc9-ccb131f559a5" [ 1279.789139] env[63345]: _type = "HttpNfcLease" [ 1279.789139] env[63345]: }. {{(pid=63345) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1279.789663] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d52a4ada-a6a2-48ea-89a1-e6d9de929c95 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.796354] env[63345]: DEBUG oslo_vmware.rw_handles [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5232cad8-c928-f1c2-a8cd-a4e861352b5d/disk-0.vmdk from lease info. {{(pid=63345) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1279.796535] env[63345]: DEBUG oslo_vmware.rw_handles [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Creating HTTP connection to write to file with size = 31668736 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5232cad8-c928-f1c2-a8cd-a4e861352b5d/disk-0.vmdk. {{(pid=63345) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1279.860368] env[63345]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-0b99e59d-244e-4f9d-9190-440678c3d025 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.917848] env[63345]: DEBUG oslo_vmware.rw_handles [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Completed reading data from the image iterator. {{(pid=63345) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1280.918337] env[63345]: DEBUG oslo_vmware.rw_handles [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5232cad8-c928-f1c2-a8cd-a4e861352b5d/disk-0.vmdk. 
{{(pid=63345) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1280.919075] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1f20898-5a38-4709-8ca8-9af3c42e66ef {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.926359] env[63345]: DEBUG oslo_vmware.rw_handles [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5232cad8-c928-f1c2-a8cd-a4e861352b5d/disk-0.vmdk is in state: ready. {{(pid=63345) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1280.926359] env[63345]: DEBUG oslo_vmware.rw_handles [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5232cad8-c928-f1c2-a8cd-a4e861352b5d/disk-0.vmdk. {{(pid=63345) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1280.926359] env[63345]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-3bb20761-0658-4d92-b12e-d282d4d7ff54 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.124281] env[63345]: DEBUG oslo_vmware.rw_handles [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5232cad8-c928-f1c2-a8cd-a4e861352b5d/disk-0.vmdk. 
{{(pid=63345) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1281.124857] env[63345]: INFO nova.virt.vmwareapi.images [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Downloaded image file data e478039f-650f-4882-b1e8-a69bb1f9adf4 [ 1281.125607] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2adbba17-064e-4347-8d46-095d960c4e94 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.141902] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1b6c90df-8e3e-4cba-8a31-97b2b4fbfd1e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.190769] env[63345]: INFO nova.virt.vmwareapi.images [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] The imported VM was unregistered [ 1281.194189] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Caching image {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1281.194459] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Creating directory with path [datastore2] devstack-image-cache_base/e478039f-650f-4882-b1e8-a69bb1f9adf4 {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1281.194824] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d1b75561-636e-4f6f-ac2e-ec4c75deb8c1 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.213629] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Created directory with path [datastore2] devstack-image-cache_base/e478039f-650f-4882-b1e8-a69bb1f9adf4 {{(pid=63345) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1281.213825] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_49383081-80b2-4ea4-af13-d0e954c4007f/OSTACK_IMG_49383081-80b2-4ea4-af13-d0e954c4007f.vmdk to [datastore2] devstack-image-cache_base/e478039f-650f-4882-b1e8-a69bb1f9adf4/e478039f-650f-4882-b1e8-a69bb1f9adf4.vmdk. 
{{(pid=63345) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1281.214092] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-19201d89-ae9a-4557-81a7-c337958e0fd5 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.220011] env[63345]: DEBUG oslo_vmware.api [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 1281.220011] env[63345]: value = "task-1018050" [ 1281.220011] env[63345]: _type = "Task" [ 1281.220011] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1281.227037] env[63345]: DEBUG oslo_vmware.api [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1018050, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1281.730166] env[63345]: DEBUG oslo_vmware.api [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1018050, 'name': MoveVirtualDisk_Task} progress is 21%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1282.230507] env[63345]: DEBUG oslo_vmware.api [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1018050, 'name': MoveVirtualDisk_Task} progress is 43%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1282.731572] env[63345]: DEBUG oslo_vmware.api [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1018050, 'name': MoveVirtualDisk_Task} progress is 69%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1283.232258] env[63345]: DEBUG oslo_vmware.api [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1018050, 'name': MoveVirtualDisk_Task} progress is 91%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1283.733109] env[63345]: DEBUG oslo_vmware.api [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1018050, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.269716} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1283.733109] env[63345]: INFO nova.virt.vmwareapi.ds_util [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_49383081-80b2-4ea4-af13-d0e954c4007f/OSTACK_IMG_49383081-80b2-4ea4-af13-d0e954c4007f.vmdk to [datastore2] devstack-image-cache_base/e478039f-650f-4882-b1e8-a69bb1f9adf4/e478039f-650f-4882-b1e8-a69bb1f9adf4.vmdk. 
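The task records around this point (CreateVM_Task, MoveVirtualDisk_Task, DeleteDatastoreFile_Task, CopyVirtualDisk_Task) all follow the same wait-and-poll pattern: the API call returns a Task handle, and the caller keeps re-reading it until it reports success or error, logging "progress is N%" between polls. A minimal, self-contained sketch of that pattern follows; wait_for_task, poll_fn, and the dict shape used here are illustrative placeholders for this sketch, not oslo.vmware's actual API.

    import time

    def wait_for_task(poll_fn, interval=0.5, timeout=300.0):
        """Poll a long-running task until it completes or times out.

        poll_fn() is assumed (for this sketch) to return a dict shaped
        roughly like the log entries above, e.g.
        {'state': 'running', 'progress': 43} or
        {'state': 'success', 'duration_secs': 2.269716}.
        """
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = poll_fn()
            if info['state'] == 'success':
                return info
            if info['state'] == 'error':
                raise RuntimeError('task failed: %s' % info.get('error'))
            # Corresponds to the "progress is N%" lines emitted between polls.
            print('progress is %s%%' % info.get('progress', 0))
            time.sleep(interval)
        raise TimeoutError('task did not complete within %.0fs' % timeout)

Each poll in the log above is one iteration of such a loop; the final record carries the measured duration_secs once the task reaches the success state.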
[ 1283.733109] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Cleaning up location [datastore2] OSTACK_IMG_49383081-80b2-4ea4-af13-d0e954c4007f {{(pid=63345) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1283.733382] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_49383081-80b2-4ea4-af13-d0e954c4007f {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1283.733382] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-73a7e584-b024-4837-bdf1-16317f195edd {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.739384] env[63345]: DEBUG oslo_vmware.api [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 1283.739384] env[63345]: value = "task-1018051" [ 1283.739384] env[63345]: _type = "Task" [ 1283.739384] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1283.746275] env[63345]: DEBUG oslo_vmware.api [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1018051, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1284.250205] env[63345]: DEBUG oslo_vmware.api [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1018051, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.041461} completed successfully. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1284.250644] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1284.250644] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e478039f-650f-4882-b1e8-a69bb1f9adf4/e478039f-650f-4882-b1e8-a69bb1f9adf4.vmdk" {{(pid=63345) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1284.250874] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/e478039f-650f-4882-b1e8-a69bb1f9adf4/e478039f-650f-4882-b1e8-a69bb1f9adf4.vmdk to [datastore2] 53f759e4-0398-40ef-823a-3028d1ac82b1/53f759e4-0398-40ef-823a-3028d1ac82b1.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1284.251141] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ad5d27e4-70fb-4f95-a5dc-6b4291f9d801 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.257201] env[63345]: DEBUG oslo_vmware.api [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 1284.257201] env[63345]: value = "task-1018052" [ 1284.257201] env[63345]: _type = "Task" [ 1284.257201] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1284.264209] env[63345]: DEBUG oslo_vmware.api [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1018052, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1284.767677] env[63345]: DEBUG oslo_vmware.api [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1018052, 'name': CopyVirtualDisk_Task} progress is 21%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1285.275963] env[63345]: DEBUG oslo_vmware.api [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1018052, 'name': CopyVirtualDisk_Task} progress is 43%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1285.770475] env[63345]: DEBUG oslo_vmware.api [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1018052, 'name': CopyVirtualDisk_Task} progress is 69%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1286.270851] env[63345]: DEBUG oslo_vmware.api [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1018052, 'name': CopyVirtualDisk_Task} progress is 91%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1286.771195] env[63345]: DEBUG oslo_vmware.api [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1018052, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.20183} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1286.771544] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/e478039f-650f-4882-b1e8-a69bb1f9adf4/e478039f-650f-4882-b1e8-a69bb1f9adf4.vmdk to [datastore2] 53f759e4-0398-40ef-823a-3028d1ac82b1/53f759e4-0398-40ef-823a-3028d1ac82b1.vmdk {{(pid=63345) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 1286.772265] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d02c9c34-d5c8-4ca2-8843-d5a78c210f12 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.794759] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Reconfiguring VM instance instance-00000076 to attach disk [datastore2] 53f759e4-0398-40ef-823a-3028d1ac82b1/53f759e4-0398-40ef-823a-3028d1ac82b1.vmdk or device None with type streamOptimized {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1286.795053] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4bab248f-bd0b-48dc-9537-6bc918b16367 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.813559] env[63345]: DEBUG oslo_vmware.api [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 1286.813559] env[63345]: value = "task-1018053" [ 1286.813559] env[63345]: _type = "Task" [ 1286.813559] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1286.820965] env[63345]: DEBUG oslo_vmware.api [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1018053, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1287.323173] env[63345]: DEBUG oslo_vmware.api [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1018053, 'name': ReconfigVM_Task, 'duration_secs': 0.336654} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1287.323457] env[63345]: DEBUG nova.virt.vmwareapi.volumeops [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Reconfigured VM instance instance-00000076 to attach disk [datastore2] 53f759e4-0398-40ef-823a-3028d1ac82b1/53f759e4-0398-40ef-823a-3028d1ac82b1.vmdk or device None with type streamOptimized {{(pid=63345) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1287.324084] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ac8cd210-4d03-4f35-b5ab-0bc8d5c26a0d {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.330176] env[63345]: DEBUG oslo_vmware.api [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 1287.330176] env[63345]: value = "task-1018054" [ 1287.330176] env[63345]: _type = "Task" [ 1287.330176] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1287.337200] env[63345]: DEBUG oslo_vmware.api [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1018054, 'name': Rename_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1287.840980] env[63345]: DEBUG oslo_vmware.api [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1018054, 'name': Rename_Task, 'duration_secs': 0.268332} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1287.840980] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Powering on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1287.840980] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4c89a292-81ea-4d1c-bc9d-578b508657f7 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.847646] env[63345]: DEBUG oslo_vmware.api [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 1287.847646] env[63345]: value = "task-1018055" [ 1287.847646] env[63345]: _type = "Task" [ 1287.847646] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1287.855500] env[63345]: DEBUG oslo_vmware.api [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1018055, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1288.358062] env[63345]: DEBUG oslo_vmware.api [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1018055, 'name': PowerOnVM_Task, 'duration_secs': 0.457971} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1288.358062] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Powered on the VM {{(pid=63345) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1288.451798] env[63345]: DEBUG nova.compute.manager [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Checking state {{(pid=63345) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1288.452730] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d07804b-11f7-4ce7-93e6-4e938851f90e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.969688] env[63345]: DEBUG oslo_concurrency.lockutils [None req-b7f92a60-3a41-4ee2-8068-8e8d3bfad8aa tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lock "53f759e4-0398-40ef-823a-3028d1ac82b1" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 18.756s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1289.710361] env[63345]: DEBUG oslo_concurrency.lockutils [None req-adbffc17-ff31-490b-a61f-cfcf459ca783 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Acquiring lock "53f759e4-0398-40ef-823a-3028d1ac82b1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1289.710661] env[63345]: DEBUG oslo_concurrency.lockutils [None req-adbffc17-ff31-490b-a61f-cfcf459ca783 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lock "53f759e4-0398-40ef-823a-3028d1ac82b1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1289.710882] env[63345]: DEBUG oslo_concurrency.lockutils [None req-adbffc17-ff31-490b-a61f-cfcf459ca783 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Acquiring lock "53f759e4-0398-40ef-823a-3028d1ac82b1-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1289.711098] env[63345]: DEBUG oslo_concurrency.lockutils [None req-adbffc17-ff31-490b-a61f-cfcf459ca783 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lock "53f759e4-0398-40ef-823a-3028d1ac82b1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1289.711295] env[63345]: DEBUG oslo_concurrency.lockutils [None req-adbffc17-ff31-490b-a61f-cfcf459ca783 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lock "53f759e4-0398-40ef-823a-3028d1ac82b1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1289.713475] env[63345]: INFO nova.compute.manager [None req-adbffc17-ff31-490b-a61f-cfcf459ca783 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Terminating instance [ 1290.217578] env[63345]: DEBUG nova.compute.manager [None req-adbffc17-ff31-490b-a61f-cfcf459ca783 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Start destroying the instance on the hypervisor. {{(pid=63345) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 1290.217943] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-adbffc17-ff31-490b-a61f-cfcf459ca783 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Destroying instance {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1290.218780] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81443121-e4bc-40b1-9dba-c8a079c12d4c {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.226606] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-adbffc17-ff31-490b-a61f-cfcf459ca783 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Powering off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1290.226830] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-22d6745f-f508-4f2e-8838-19fbbe650934 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.233017] env[63345]: DEBUG oslo_vmware.api [None req-adbffc17-ff31-490b-a61f-cfcf459ca783 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 1290.233017] env[63345]: value = "task-1018056" [ 1290.233017] env[63345]: _type = "Task" [ 1290.233017] env[63345]: } to complete. 
{{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1290.241645] env[63345]: DEBUG oslo_vmware.api [None req-adbffc17-ff31-490b-a61f-cfcf459ca783 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1018056, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1290.742821] env[63345]: DEBUG oslo_vmware.api [None req-adbffc17-ff31-490b-a61f-cfcf459ca783 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1018056, 'name': PowerOffVM_Task, 'duration_secs': 0.203585} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1290.743105] env[63345]: DEBUG nova.virt.vmwareapi.vm_util [None req-adbffc17-ff31-490b-a61f-cfcf459ca783 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Powered off the VM {{(pid=63345) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1290.743284] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-adbffc17-ff31-490b-a61f-cfcf459ca783 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Unregistering the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1290.743526] env[63345]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-db93b085-ccc1-4170-8376-217deac586bc {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.813020] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-adbffc17-ff31-490b-a61f-cfcf459ca783 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Unregistered the VM {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1290.813259] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-adbffc17-ff31-490b-a61f-cfcf459ca783 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Deleting contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1290.813447] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-adbffc17-ff31-490b-a61f-cfcf459ca783 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Deleting the datastore file [datastore2] 53f759e4-0398-40ef-823a-3028d1ac82b1 {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1290.813697] env[63345]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-db8f3f4d-bd5d-4a6e-baba-b8013764c942 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.819717] env[63345]: DEBUG oslo_vmware.api [None req-adbffc17-ff31-490b-a61f-cfcf459ca783 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for the task: (returnval){ [ 1290.819717] env[63345]: value = "task-1018058" [ 1290.819717] 
env[63345]: _type = "Task" [ 1290.819717] env[63345]: } to complete. {{(pid=63345) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1290.826786] env[63345]: DEBUG oslo_vmware.api [None req-adbffc17-ff31-490b-a61f-cfcf459ca783 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1018058, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1291.329920] env[63345]: DEBUG oslo_vmware.api [None req-adbffc17-ff31-490b-a61f-cfcf459ca783 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Task: {'id': task-1018058, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.13283} completed successfully. {{(pid=63345) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1291.330335] env[63345]: DEBUG nova.virt.vmwareapi.ds_util [None req-adbffc17-ff31-490b-a61f-cfcf459ca783 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Deleted the datastore file {{(pid=63345) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1291.330436] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-adbffc17-ff31-490b-a61f-cfcf459ca783 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Deleted contents of the VM from datastore datastore2 {{(pid=63345) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1291.330576] env[63345]: DEBUG nova.virt.vmwareapi.vmops [None req-adbffc17-ff31-490b-a61f-cfcf459ca783 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Instance destroyed {{(pid=63345) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1291.330757] env[63345]: INFO nova.compute.manager [None req-adbffc17-ff31-490b-a61f-cfcf459ca783 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1291.330994] env[63345]: DEBUG oslo.service.loopingcall [None req-adbffc17-ff31-490b-a61f-cfcf459ca783 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63345) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1291.331235] env[63345]: DEBUG nova.compute.manager [-] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Deallocating network for instance {{(pid=63345) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 1291.331333] env[63345]: DEBUG nova.network.neutron [-] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] deallocate_for_instance() {{(pid=63345) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1291.767990] env[63345]: DEBUG nova.compute.manager [req-37b05400-c355-4e85-9785-cc5b8fbc14ed req-d533ba86-4c4b-41c0-ab9f-a5e82e4dfbc4 service nova] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Received event network-vif-deleted-4b3221f1-3c39-4726-b760-339e16d0d89e {{(pid=63345) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1291.768248] env[63345]: INFO nova.compute.manager [req-37b05400-c355-4e85-9785-cc5b8fbc14ed req-d533ba86-4c4b-41c0-ab9f-a5e82e4dfbc4 service nova] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Neutron deleted interface 4b3221f1-3c39-4726-b760-339e16d0d89e; detaching it from the instance and deleting it from the info cache [ 1291.768445] env[63345]: DEBUG nova.network.neutron [req-37b05400-c355-4e85-9785-cc5b8fbc14ed req-d533ba86-4c4b-41c0-ab9f-a5e82e4dfbc4 service nova] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1292.248385] env[63345]: DEBUG nova.network.neutron [-] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Updating instance_info_cache with network_info: [] {{(pid=63345) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1292.271239] env[63345]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a335c4bd-9f49-4d98-a10f-bf009970e295 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.280321] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed18f597-956f-45a4-a29f-8f3474613d9e {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.301837] env[63345]: DEBUG nova.compute.manager [req-37b05400-c355-4e85-9785-cc5b8fbc14ed req-d533ba86-4c4b-41c0-ab9f-a5e82e4dfbc4 service nova] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Detach interface failed, port_id=4b3221f1-3c39-4726-b760-339e16d0d89e, reason: Instance 53f759e4-0398-40ef-823a-3028d1ac82b1 could not be found. {{(pid=63345) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 1292.751589] env[63345]: INFO nova.compute.manager [-] [instance: 53f759e4-0398-40ef-823a-3028d1ac82b1] Took 1.42 seconds to deallocate network for instance. 
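The PowerOffVM_Task and DeleteDatastoreFile_Task entries above follow oslo.vmware's submit-then-poll pattern: the driver invokes a vSphere task method through the API session and then waits on the returned task, which is what produces the "Task: {...} progress is 0%" and "completed successfully" DEBUG lines. Below is a minimal sketch of that call pattern using the public oslo.vmware session API; the vCenter host, credentials, retry/poll settings and error handling are placeholder assumptions, not values or code taken from this run.

    from oslo_vmware import api

    # Placeholder connection settings (assumptions, not from this log).
    session = api.VMwareAPISession(
        'vcenter.example.org', 'svc-nova', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # Locate the VM by Nova instance UUID, which is what the
    # SearchIndex.FindAllByUuid invocation above does.
    vm_refs = session.invoke_api(
        session.vim, 'FindAllByUuid',
        session.vim.service_content.searchIndex,
        uuid='53f759e4-0398-40ef-823a-3028d1ac82b1',
        vmSearch=True, instanceUuid=True)
    if not vm_refs:
        # Compare the "Instance ... could not be found" entry above: a real
        # caller has to handle an empty lookup result.
        raise RuntimeError('instance not found in vCenter')
    vm_ref = vm_refs[0]

    # Submit the power-off task, then block until vCenter reports completion.
    # wait_for_task() polls the task object and logs its progress, producing
    # the "Task: {...} progress is N%" lines seen above.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)

The same submit-and-wait shape applies to FileManager.DeleteDatastoreFile_Task, while UnregisterVM completes synchronously, which is why no task-polling lines follow it in the log.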
[ 1293.258142] env[63345]: DEBUG oslo_concurrency.lockutils [None req-adbffc17-ff31-490b-a61f-cfcf459ca783 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1293.258444] env[63345]: DEBUG oslo_concurrency.lockutils [None req-adbffc17-ff31-490b-a61f-cfcf459ca783 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1293.258683] env[63345]: DEBUG nova.objects.instance [None req-adbffc17-ff31-490b-a61f-cfcf459ca783 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lazy-loading 'resources' on Instance uuid 53f759e4-0398-40ef-823a-3028d1ac82b1 {{(pid=63345) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1293.791384] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce12fa8c-4939-4789-940d-1db12cfbe693 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.798961] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-164c4159-6055-46f5-9208-cc8d92f908f9 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.828313] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4923766-1ef5-4487-8d00-1d1e1b19dbe8 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.835276] env[63345]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ad692b4-abc0-436e-b446-9d683dea0d56 {{(pid=63345) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.847738] env[63345]: DEBUG nova.compute.provider_tree [None req-adbffc17-ff31-490b-a61f-cfcf459ca783 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Inventory has not changed in ProviderTree for provider: fc35ddde-c15e-4ab8-bf77-a06ae0805b57 {{(pid=63345) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1294.350274] env[63345]: DEBUG nova.scheduler.client.report [None req-adbffc17-ff31-490b-a61f-cfcf459ca783 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Inventory has not changed for provider fc35ddde-c15e-4ab8-bf77-a06ae0805b57 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 187, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63345) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1294.855779] env[63345]: DEBUG oslo_concurrency.lockutils [None req-adbffc17-ff31-490b-a61f-cfcf459ca783 
tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.597s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1294.876999] env[63345]: INFO nova.scheduler.client.report [None req-adbffc17-ff31-490b-a61f-cfcf459ca783 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Deleted allocations for instance 53f759e4-0398-40ef-823a-3028d1ac82b1 [ 1295.385400] env[63345]: DEBUG oslo_concurrency.lockutils [None req-adbffc17-ff31-490b-a61f-cfcf459ca783 tempest-ServerActionsTestOtherB-1518567629 tempest-ServerActionsTestOtherB-1518567629-project-member] Lock "53f759e4-0398-40ef-823a-3028d1ac82b1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.675s {{(pid=63345) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
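The lock bookkeeping that brackets this section, the per-instance lock held for 5.675s around do_terminate_instance and the "compute_resources" lock held for 1.597s around ResourceTracker.update_usage, comes from oslo.concurrency's synchronized wrapper, which logs how long each caller waited for and held the named in-process lock. A minimal sketch of that primitive follows, with stand-in function bodies rather than Nova's actual manager or resource-tracker code (Nova reaches the same wrapper through its own utils helpers):

    from oslo_concurrency import lockutils

    # Serializes all callers on the in-process lock named "compute_resources";
    # the wrapper emits the "Acquiring lock ... / acquired ... waited Xs /
    # released ... held Ys" DEBUG lines seen above.
    @lockutils.synchronized('compute_resources')
    def update_usage(instance_uuid, flavor):
        # Stand-in body: adjust tracked vCPU/RAM/disk usage for the instance.
        pass

    def terminate_instance(instance_uuid):
        # Per-instance lock on a decorated inner function, matching the
        # "terminate_instance..do_terminate_instance" holder names in the log.
        @lockutils.synchronized(instance_uuid)
        def do_terminate_instance():
            # Stand-in body: power off, unregister, delete datastore files,
            # deallocate network, then update resource usage.
            pass
        do_terminate_instance()

Using the instance UUID as the lock name is what keeps the unshelve, terminate and event-handling paths above from operating on 53f759e4-0398-40ef-823a-3028d1ac82b1 concurrently.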